From 8d0f09b1beafd95763a5da53acc58dac0bd63a53 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sun, 26 Feb 2023 14:45:37 -0800 Subject: [PATCH 01/40] gh-101765: unicodeobject: use Py_XDECREF correctly (#102283) --- Objects/unicodeobject.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 2f4c3d3793efd6..1ba30421c66dba 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14795,7 +14795,7 @@ unicodeiter_reduce(unicodeiterobject *it, PyObject *Py_UNUSED(ignored)) } else { PyObject *u = unicode_new_empty(); if (u == NULL) { - Py_DECREF(iter); + Py_XDECREF(iter); return NULL; } return Py_BuildValue("N(N)", iter, u); From f3cb15c88afa2e87067de3c6106664b3f7cd4035 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 27 Feb 2023 03:10:34 +0100 Subject: [PATCH 02/40] gh-91038: Change default argument value to `False` instead of `0` (#31621) The argument is used as a switch and corresponds to a boolean logic. Therefore it is more intuitive to use the corresponding constant `False` as default value instead of the integer `0`. Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Co-authored-by: Oleg Iarygin --- Doc/library/platform.rst | 2 +- Lib/platform.py | 2 +- .../next/Library/2023-02-26-12-37-17.gh-issue-91038.S4rFH_.rst | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2023-02-26-12-37-17.gh-issue-91038.S4rFH_.rst diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index a0c9f63ab9f957..69c4dfc422c98e 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -63,7 +63,7 @@ Cross Platform string is returned if the value cannot be determined. -.. function:: platform(aliased=0, terse=0) +.. function:: platform(aliased=False, terse=False) Returns a single string identifying the underlying platform with as much useful information as possible. diff --git a/Lib/platform.py b/Lib/platform.py index 2dfaf76252db51..f2b0d1d1bd3f5d 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -1246,7 +1246,7 @@ def python_compiler(): _platform_cache = {} -def platform(aliased=0, terse=0): +def platform(aliased=False, terse=False): """ Returns a single string identifying the underlying platform with as much useful information as possible (but no more :). diff --git a/Misc/NEWS.d/next/Library/2023-02-26-12-37-17.gh-issue-91038.S4rFH_.rst b/Misc/NEWS.d/next/Library/2023-02-26-12-37-17.gh-issue-91038.S4rFH_.rst new file mode 100644 index 00000000000000..2667ff120fd402 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-26-12-37-17.gh-issue-91038.S4rFH_.rst @@ -0,0 +1 @@ +:meth:`platform.platform` now has boolean default arguments. From 101a12c5767a8c6ca6e32b8e24a462d2606d24ca Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 27 Feb 2023 10:26:21 +0300 Subject: [PATCH 03/40] gh-101100: Fix sphinx warnings in `types` module (#102274) Co-authored-by: Alex Waygood --- Doc/library/types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 415413c4cd9747..747ba58bb225d4 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -486,7 +486,7 @@ Coroutine Utility Functions The generator-based coroutine is still a :term:`generator iterator`, but is also considered to be a :term:`coroutine` object and is :term:`awaitable`. However, it may not necessarily implement - the :meth:`__await__` method. + the :meth:`~object.__await__` method. 
If *gen_func* is a generator function, it will be modified in-place. From e3c3f9fec099fe78d2f98912be337d632f6fcdd1 Mon Sep 17 00:00:00 2001 From: Dennis Sweeney <36520290+sweeneyde@users.noreply.github.com> Date: Mon, 27 Feb 2023 05:46:40 -0500 Subject: [PATCH 04/40] gh-102250: Fix double-decref in COMPARE_AND_BRANCH error case (GH-102287) --- Lib/test/test_bool.py | 20 +++++++++++++++++++ ...-02-26-23-10-32.gh-issue-102250.7MUKoC.rst | 1 + Python/bytecodes.c | 4 +--- Python/generated_cases.c.h | 4 +--- 4 files changed, 23 insertions(+), 6 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2023-02-26-23-10-32.gh-issue-102250.7MUKoC.rst diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py index b711ffb9a3ecd5..916e22a527a8e0 100644 --- a/Lib/test/test_bool.py +++ b/Lib/test/test_bool.py @@ -319,6 +319,26 @@ def __len__(self): return -1 self.assertRaises(ValueError, bool, Eggs()) + def test_interpreter_convert_to_bool_raises(self): + class SymbolicBool: + def __bool__(self): + raise TypeError + + class Symbol: + def __gt__(self, other): + return SymbolicBool() + + x = Symbol() + + with self.assertRaises(TypeError): + if x > 0: + msg = "x > 0 was true" + else: + msg = "x > 0 was false" + + # This used to create negative refcounts, see gh-102250 + del x + def test_from_bytes(self): self.assertIs(bool.from_bytes(b'\x00'*8, 'big'), False) self.assertIs(bool.from_bytes(b'abcd', 'little'), True) diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-02-26-23-10-32.gh-issue-102250.7MUKoC.rst b/Misc/NEWS.d/next/Core and Builtins/2023-02-26-23-10-32.gh-issue-102250.7MUKoC.rst new file mode 100644 index 00000000000000..17ab0cd4367991 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-02-26-23-10-32.gh-issue-102250.7MUKoC.rst @@ -0,0 +1 @@ +Fixed a segfault occurring when the interpreter calls a ``__bool__`` method that raises. diff --git a/Python/bytecodes.c b/Python/bytecodes.c index ad68c794fe7acb..7e9b36f697210a 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1754,9 +1754,7 @@ dummy_func( int offset = next_instr[1].op.arg; int err = PyObject_IsTrue(cond); Py_DECREF(cond); - if (err < 0) { - goto error; - } + ERROR_IF(err < 0, error); if (jump_on_true == (err != 0)) { JUMPBY(offset); } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 2987adc3bba566..271ba26f489521 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -2205,9 +2205,7 @@ int offset = next_instr[1].op.arg; int err = PyObject_IsTrue(cond); Py_DECREF(cond); - if (err < 0) { - goto error; - } + if (err < 0) goto pop_2_error; if (jump_on_true == (err != 0)) { JUMPBY(offset); } From 0db6f442598a1994c37f24e704892a2bb71a0a1b Mon Sep 17 00:00:00 2001 From: Gouvernathor <44340603+Gouvernathor@users.noreply.github.com> Date: Mon, 27 Feb 2023 16:13:18 +0100 Subject: [PATCH 05/40] gh-102296 Document that inspect.Parameter kinds support ordering (GH-102297) Automerge-Triggered-By: GH:AlexWaygood --- Doc/library/inspect.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 9c3be5a250a67e..789e9839d22f71 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -802,8 +802,9 @@ function. .. attribute:: Parameter.kind - Describes how argument values are bound to the parameter. Possible values - (accessible via :class:`Parameter`, like ``Parameter.KEYWORD_ONLY``): + Describes how argument values are bound to the parameter. 
The possible + values are accessible via :class:`Parameter` (like ``Parameter.KEYWORD_ONLY``), + and support comparison and ordering, in the following order: .. tabularcolumns:: |l|L| From bb0cf8fd60e71581a90179af63e60e8704c3814b Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 27 Feb 2023 09:21:18 -0700 Subject: [PATCH 06/40] gh-102251: Updates to test_imp Toward Fixing Some Refleaks (gh-102254) This is related to fixing the refleaks introduced by commit 096d009. I haven't been able to find the leak yet, but these changes are a consequence of that effort. This includes some cleanup, some tweaks to the existing tests, and a bunch of new test cases. The only change here that might have impact outside the tests in question is in imp.py, where I update imp.load_dynamic() to use spec_from_file_location() instead of creating a ModuleSpec directly. Also note that I've updated the tests to only skip if we're checking for refleaks (regrtest's --huntrleaks), whereas in gh-101969 I had skipped the tests entirely. The tests will be useful for some upcoming work and I'd rather the refleaks not hold that up. (It isn't clear how quickly we'll be able to fix the leaking code, though it will certainly be done in the short term.) https://github.com/python/cpython/issues/102251 --- Lib/imp.py | 4 +- Lib/test/test_imp.py | 1110 +++++++++++++++++++++++++++--------- Modules/_testsinglephase.c | 22 +- Python/import.c | 115 +++- 4 files changed, 952 insertions(+), 299 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py index fc42c15765852e..fe850f6a001814 100644 --- a/Lib/imp.py +++ b/Lib/imp.py @@ -338,8 +338,8 @@ def load_dynamic(name, path, file=None): # Issue #24748: Skip the sys.modules check in _load_module_shim; # always load new extension - spec = importlib.machinery.ModuleSpec( - name=name, loader=loader, origin=path) + spec = importlib.util.spec_from_file_location( + name, path, loader=loader) return _load(spec) else: diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py index 2292bb20939599..03e3adba221e57 100644 --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -1,4 +1,5 @@ import gc +import json import importlib import importlib.util import os @@ -11,6 +12,7 @@ from test.support import script_helper from test.support import warnings_helper import textwrap +import types import unittest import warnings imp = warnings_helper.import_deprecated('imp') @@ -39,6 +41,169 @@ def requires_load_dynamic(meth): 'imp.load_dynamic() required')(meth) +class ModuleSnapshot(types.SimpleNamespace): + """A representation of a module for testing. + + Fields: + + * id - the module's object ID + * module - the actual module or an adequate substitute + * __file__ + * __spec__ + * name + * origin + * ns - a copy (dict) of the module's __dict__ (or None) + * ns_id - the object ID of the module's __dict__ + * cached - the sys.modules[mod.__spec__.name] entry (or None) + * cached_id - the object ID of the sys.modules entry (or None) + + In cases where the value is not available (e.g. due to serialization), + the value will be None. 
+ """ + _fields = tuple('id module ns ns_id cached cached_id'.split()) + + @classmethod + def from_module(cls, mod): + name = mod.__spec__.name + cached = sys.modules.get(name) + return cls( + id=id(mod), + module=mod, + ns=types.SimpleNamespace(**mod.__dict__), + ns_id=id(mod.__dict__), + cached=cached, + cached_id=id(cached), + ) + + SCRIPT = textwrap.dedent(''' + {imports} + + name = {name!r} + + {prescript} + + mod = {name} + + {body} + + {postscript} + ''') + IMPORTS = textwrap.dedent(''' + import sys + ''').strip() + SCRIPT_BODY = textwrap.dedent(''' + # Capture the snapshot data. + cached = sys.modules.get(name) + snapshot = dict( + id=id(mod), + module=dict( + __file__=mod.__file__, + __spec__=dict( + name=mod.__spec__.name, + origin=mod.__spec__.origin, + ), + ), + ns=None, + ns_id=id(mod.__dict__), + cached=None, + cached_id=id(cached) if cached else None, + ) + ''').strip() + CLEANUP_SCRIPT = textwrap.dedent(''' + # Clean up the module. + sys.modules.pop(name, None) + ''').strip() + + @classmethod + def build_script(cls, name, *, + prescript=None, + import_first=False, + postscript=None, + postcleanup=False, + ): + if postcleanup is True: + postcleanup = cls.CLEANUP_SCRIPT + elif isinstance(postcleanup, str): + postcleanup = textwrap.dedent(postcleanup).strip() + postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup + else: + postcleanup = '' + prescript = textwrap.dedent(prescript).strip() if prescript else '' + postscript = textwrap.dedent(postscript).strip() if postscript else '' + + if postcleanup: + if postscript: + postscript = postscript + os.linesep * 2 + postcleanup + else: + postscript = postcleanup + + if import_first: + prescript += textwrap.dedent(f''' + + # Now import the module. + assert name not in sys.modules + import {name}''') + + return cls.SCRIPT.format( + imports=cls.IMPORTS.strip(), + name=name, + prescript=prescript.strip(), + body=cls.SCRIPT_BODY.strip(), + postscript=postscript, + ) + + @classmethod + def parse(cls, text): + raw = json.loads(text) + mod = raw['module'] + mod['__spec__'] = types.SimpleNamespace(**mod['__spec__']) + raw['module'] = types.SimpleNamespace(**mod) + return cls(**raw) + + @classmethod + def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds): + if pipe is not None: + return cls._from_subinterp(name, interpid, pipe, script_kwds) + pipe = os.pipe() + try: + return cls._from_subinterp(name, interpid, pipe, script_kwds) + finally: + r, w = pipe + os.close(r) + os.close(w) + + @classmethod + def _from_subinterp(cls, name, interpid, pipe, script_kwargs): + r, w = pipe + + # Build the script. + postscript = textwrap.dedent(f''' + # Send the result over the pipe. + import json + import os + os.write({w}, json.dumps(snapshot).encode()) + + ''') + _postscript = script_kwargs.get('postscript') + if _postscript: + _postscript = textwrap.dedent(_postscript).lstrip() + postscript += _postscript + script_kwargs['postscript'] = postscript.strip() + script = cls.build_script(name, **script_kwargs) + + # Run the script. + if interpid is None: + ret = support.run_in_subinterp(script) + if ret != 0: + raise AssertionError(f'{ret} != 0') + else: + _interpreters.run_string(interpid, script) + + # Parse the results. 
+ text = os.read(r, 1000) + return cls.parse(text.decode()) + + class LockTests(unittest.TestCase): """Very basic test of import lock functions.""" @@ -263,288 +428,6 @@ def test_issue16421_multiple_modules_in_one_dll(self): with self.assertRaises(ImportError): imp.load_dynamic('nonexistent', pathname) - @unittest.skip('known refleak (temporarily skipping)') - @requires_subinterpreters - @requires_load_dynamic - def test_singlephase_multiple_interpreters(self): - # Currently, for every single-phrase init module loaded - # in multiple interpreters, those interpreters share a - # PyModuleDef for that object, which can be a problem. - - # This single-phase module has global state, which is shared - # by the interpreters. - import _testsinglephase - name = _testsinglephase.__name__ - filename = _testsinglephase.__file__ - - del sys.modules[name] - _testsinglephase._clear_globals() - _testinternalcapi.clear_extension(name, filename) - init_count = _testsinglephase.initialized_count() - assert init_count == -1, (init_count,) - - def clean_up(): - _testsinglephase._clear_globals() - _testinternalcapi.clear_extension(name, filename) - self.addCleanup(clean_up) - - interp1 = _interpreters.create(isolated=False) - self.addCleanup(_interpreters.destroy, interp1) - interp2 = _interpreters.create(isolated=False) - self.addCleanup(_interpreters.destroy, interp2) - - script = textwrap.dedent(f''' - import _testsinglephase - - expected = %d - init_count = _testsinglephase.initialized_count() - if init_count != expected: - raise Exception(init_count) - - lookedup = _testsinglephase.look_up_self() - if lookedup is not _testsinglephase: - raise Exception((_testsinglephase, lookedup)) - - # Attrs set in the module init func are in m_copy. - _initialized = _testsinglephase._initialized - initialized = _testsinglephase.initialized() - if _initialized != initialized: - raise Exception((_initialized, initialized)) - - # Attrs set after loading are not in m_copy. - if hasattr(_testsinglephase, 'spam'): - raise Exception(_testsinglephase.spam) - _testsinglephase.spam = expected - ''') - - # Use an interpreter that gets destroyed right away. - ret = support.run_in_subinterp(script % 1) - self.assertEqual(ret, 0) - - # The module's init func gets run again. - # The module's globals did not get destroyed. - _interpreters.run_string(interp1, script % 2) - - # The module's init func is not run again. - # The second interpreter copies the module's m_copy. - # However, globals are still shared. - _interpreters.run_string(interp2, script % 2) - - @unittest.skip('known refleak (temporarily skipping)') - @requires_load_dynamic - def test_singlephase_variants(self): - # Exercise the most meaningful variants described in Python/import.c. 
- self.maxDiff = None - - basename = '_testsinglephase' - fileobj, pathname, _ = imp.find_module(basename) - fileobj.close() - - def clean_up(): - import _testsinglephase - _testsinglephase._clear_globals() - self.addCleanup(clean_up) - - def add_ext_cleanup(name): - def clean_up(): - _testinternalcapi.clear_extension(name, pathname) - self.addCleanup(clean_up) - - modules = {} - def load(name): - assert name not in modules - module = imp.load_dynamic(name, pathname) - self.assertNotIn(module, modules.values()) - modules[name] = module - return module - - def re_load(name, module): - assert sys.modules[name] is module - before = type(module)(module.__name__) - before.__dict__.update(vars(module)) - - reloaded = imp.load_dynamic(name, pathname) - - return before, reloaded - - def check_common(name, module): - summed = module.sum(1, 2) - lookedup = module.look_up_self() - initialized = module.initialized() - cached = sys.modules[name] - - # module.__name__ might not match, but the spec will. - self.assertEqual(module.__spec__.name, name) - if initialized is not None: - self.assertIsInstance(initialized, float) - self.assertGreater(initialized, 0) - self.assertEqual(summed, 3) - self.assertTrue(issubclass(module.error, Exception)) - self.assertEqual(module.int_const, 1969) - self.assertEqual(module.str_const, 'something different') - self.assertIs(cached, module) - - return lookedup, initialized, cached - - def check_direct(name, module, lookedup): - # The module has its own PyModuleDef, with a matching name. - self.assertEqual(module.__name__, name) - self.assertIs(lookedup, module) - - def check_indirect(name, module, lookedup, orig): - # The module re-uses another's PyModuleDef, with a different name. - assert orig is not module - assert orig.__name__ != name - self.assertNotEqual(module.__name__, name) - self.assertIs(lookedup, module) - - def check_basic(module, initialized): - init_count = module.initialized_count() - - self.assertIsNot(initialized, None) - self.assertIsInstance(init_count, int) - self.assertGreater(init_count, 0) - - return init_count - - def check_common_reloaded(name, module, cached, before, reloaded): - recached = sys.modules[name] - - self.assertEqual(reloaded.__spec__.name, name) - self.assertEqual(reloaded.__name__, before.__name__) - self.assertEqual(before.__dict__, module.__dict__) - self.assertIs(recached, reloaded) - - def check_basic_reloaded(module, lookedup, initialized, init_count, - before, reloaded): - relookedup = reloaded.look_up_self() - reinitialized = reloaded.initialized() - reinit_count = reloaded.initialized_count() - - self.assertIs(reloaded, module) - self.assertIs(reloaded.__dict__, module.__dict__) - # It only happens to be the same but that's good enough here. - # We really just want to verify that the re-loaded attrs - # didn't change. - self.assertIs(relookedup, lookedup) - self.assertEqual(reinitialized, initialized) - self.assertEqual(reinit_count, init_count) - - def check_with_reinit_reloaded(module, lookedup, initialized, - before, reloaded): - relookedup = reloaded.look_up_self() - reinitialized = reloaded.initialized() - - self.assertIsNot(reloaded, module) - self.assertIsNot(reloaded, module) - self.assertNotEqual(reloaded.__dict__, module.__dict__) - self.assertIs(relookedup, reloaded) - if initialized is None: - self.assertIs(reinitialized, None) - else: - self.assertGreater(reinitialized, initialized) - - # Check the "basic" module. 
- - name = basename - add_ext_cleanup(name) - expected_init_count = 1 - with self.subTest(name): - mod = load(name) - lookedup, initialized, cached = check_common(name, mod) - check_direct(name, mod, lookedup) - init_count = check_basic(mod, initialized) - self.assertEqual(init_count, expected_init_count) - - before, reloaded = re_load(name, mod) - check_common_reloaded(name, mod, cached, before, reloaded) - check_basic_reloaded(mod, lookedup, initialized, init_count, - before, reloaded) - basic = mod - - # Check its indirect variants. - - name = f'{basename}_basic_wrapper' - add_ext_cleanup(name) - expected_init_count += 1 - with self.subTest(name): - mod = load(name) - lookedup, initialized, cached = check_common(name, mod) - check_indirect(name, mod, lookedup, basic) - init_count = check_basic(mod, initialized) - self.assertEqual(init_count, expected_init_count) - - before, reloaded = re_load(name, mod) - check_common_reloaded(name, mod, cached, before, reloaded) - check_basic_reloaded(mod, lookedup, initialized, init_count, - before, reloaded) - - # Currently PyState_AddModule() always replaces the cached module. - self.assertIs(basic.look_up_self(), mod) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # The cached module shouldn't be changed after this point. - basic_lookedup = mod - - # Check its direct variant. - - name = f'{basename}_basic_copy' - add_ext_cleanup(name) - expected_init_count += 1 - with self.subTest(name): - mod = load(name) - lookedup, initialized, cached = check_common(name, mod) - check_direct(name, mod, lookedup) - init_count = check_basic(mod, initialized) - self.assertEqual(init_count, expected_init_count) - - before, reloaded = re_load(name, mod) - check_common_reloaded(name, mod, cached, before, reloaded) - check_basic_reloaded(mod, lookedup, initialized, init_count, - before, reloaded) - - # This should change the cached module for _testsinglephase. - self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # Check the non-basic variant that has no state. - - name = f'{basename}_with_reinit' - add_ext_cleanup(name) - with self.subTest(name): - mod = load(name) - lookedup, initialized, cached = check_common(name, mod) - self.assertIs(initialized, None) - check_direct(name, mod, lookedup) - - before, reloaded = re_load(name, mod) - check_common_reloaded(name, mod, cached, before, reloaded) - check_with_reinit_reloaded(mod, lookedup, initialized, - before, reloaded) - - # This should change the cached module for _testsinglephase. - self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - - # Check the basic variant that has state. - - name = f'{basename}_with_state' - add_ext_cleanup(name) - with self.subTest(name): - mod = load(name) - lookedup, initialized, cached = check_common(name, mod) - self.assertIsNot(initialized, None) - check_direct(name, mod, lookedup) - - before, reloaded = re_load(name, mod) - check_common_reloaded(name, mod, cached, before, reloaded) - check_with_reinit_reloaded(mod, lookedup, initialized, - before, reloaded) - - # This should change the cached module for _testsinglephase. 
- self.assertIs(basic.look_up_self(), basic_lookedup) - self.assertEqual(basic.initialized_count(), expected_init_count) - @requires_load_dynamic def test_load_dynamic_ImportError_path(self): # Issue #1559549 added `name` and `path` attributes to ImportError @@ -737,6 +620,669 @@ def check_get_builtins(): check_get_builtins() +class TestSinglePhaseSnapshot(ModuleSnapshot): + + @classmethod + def from_module(cls, mod): + self = super().from_module(mod) + self.summed = mod.sum(1, 2) + self.lookedup = mod.look_up_self() + self.lookedup_id = id(self.lookedup) + self.state_initialized = mod.state_initialized() + if hasattr(mod, 'initialized_count'): + self.init_count = mod.initialized_count() + return self + + SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f''' + snapshot['module'].update(dict( + int_const=mod.int_const, + str_const=mod.str_const, + _module_initialized=mod._module_initialized, + )) + snapshot.update(dict( + summed=mod.sum(1, 2), + lookedup_id=id(mod.look_up_self()), + state_initialized=mod.state_initialized(), + init_count=mod.initialized_count(), + has_spam=hasattr(mod, 'spam'), + spam=getattr(mod, 'spam', None), + )) + ''').rstrip() + + @classmethod + def parse(cls, text): + self = super().parse(text) + if not self.has_spam: + del self.spam + del self.has_spam + return self + + +@requires_load_dynamic +class SinglephaseInitTests(unittest.TestCase): + + NAME = '_testsinglephase' + + @classmethod + def setUpClass(cls): + if '-R' in sys.argv or '--huntrleaks' in sys.argv: + # https://github.com/python/cpython/issues/102251 + raise unittest.SkipTest('unresolved refleaks (see gh-102251)') + fileobj, filename, _ = imp.find_module(cls.NAME) + fileobj.close() + cls.FILE = filename + + # Start fresh. + cls.clean_up() + + def tearDown(self): + # Clean up the module. + self.clean_up() + + @classmethod + def clean_up(cls): + name = cls.NAME + filename = cls.FILE + if name in sys.modules: + if hasattr(sys.modules[name], '_clear_globals'): + assert sys.modules[name].__file__ == filename + sys.modules[name]._clear_globals() + del sys.modules[name] + # Clear all internally cached data for the extension. + _testinternalcapi.clear_extension(name, filename) + + ######################### + # helpers + + def add_module_cleanup(self, name): + def clean_up(): + # Clear all internally cached data for the extension. 
+ _testinternalcapi.clear_extension(name, self.FILE) + self.addCleanup(clean_up) + + def load(self, name): + try: + already_loaded = self.already_loaded + except AttributeError: + already_loaded = self.already_loaded = {} + assert name not in already_loaded + mod = imp.load_dynamic(name, self.FILE) + self.assertNotIn(mod, already_loaded.values()) + already_loaded[name] = mod + return types.SimpleNamespace( + name=name, + module=mod, + snapshot=TestSinglePhaseSnapshot.from_module(mod), + ) + + def re_load(self, name, mod): + assert sys.modules[name] is mod + assert mod.__dict__ == mod.__dict__ + reloaded = imp.load_dynamic(name, self.FILE) + return types.SimpleNamespace( + name=name, + module=reloaded, + snapshot=TestSinglePhaseSnapshot.from_module(reloaded), + ) + + # subinterpreters + + def add_subinterpreter(self): + interpid = _interpreters.create(isolated=False) + _interpreters.run_string(interpid, textwrap.dedent(''' + import sys + import _testinternalcapi + ''')) + def clean_up(): + _interpreters.run_string(interpid, textwrap.dedent(f''' + name = {self.NAME!r} + if name in sys.modules: + sys.modules[name]._clear_globals() + _testinternalcapi.clear_extension(name, {self.FILE!r}) + ''')) + _interpreters.destroy(interpid) + self.addCleanup(clean_up) + return interpid + + def import_in_subinterp(self, interpid=None, *, + postscript=None, + postcleanup=False, + ): + name = self.NAME + + if postcleanup: + import_ = 'import _testinternalcapi' if interpid is None else '' + postcleanup = f''' + {import_} + mod._clear_globals() + _testinternalcapi.clear_extension(name, {self.FILE!r}) + ''' + + try: + pipe = self._pipe + except AttributeError: + r, w = pipe = self._pipe = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + + snapshot = TestSinglePhaseSnapshot.from_subinterp( + name, + interpid, + pipe=pipe, + import_first=True, + postscript=postscript, + postcleanup=postcleanup, + ) + + return types.SimpleNamespace( + name=name, + module=None, + snapshot=snapshot, + ) + + # checks + + def check_common(self, loaded): + isolated = False + + mod = loaded.module + if not mod: + # It came from a subinterpreter. + isolated = True + mod = loaded.snapshot.module + # mod.__name__ might not match, but the spec will. + self.assertEqual(mod.__spec__.name, loaded.name) + self.assertEqual(mod.__file__, self.FILE) + self.assertEqual(mod.__spec__.origin, self.FILE) + if not isolated: + self.assertTrue(issubclass(mod.error, Exception)) + self.assertEqual(mod.int_const, 1969) + self.assertEqual(mod.str_const, 'something different') + self.assertIsInstance(mod._module_initialized, float) + self.assertGreater(mod._module_initialized, 0) + + snap = loaded.snapshot + self.assertEqual(snap.summed, 3) + if snap.state_initialized is not None: + self.assertIsInstance(snap.state_initialized, float) + self.assertGreater(snap.state_initialized, 0) + if isolated: + # The "looked up" module is interpreter-specific + # (interp->imports.modules_by_index was set for the module). + self.assertEqual(snap.lookedup_id, snap.id) + self.assertEqual(snap.cached_id, snap.id) + with self.assertRaises(AttributeError): + snap.spam + else: + self.assertIs(snap.lookedup, mod) + self.assertIs(snap.cached, mod) + + def check_direct(self, loaded): + # The module has its own PyModuleDef, with a matching name. 
+ self.assertEqual(loaded.module.__name__, loaded.name) + self.assertIs(loaded.snapshot.lookedup, loaded.module) + + def check_indirect(self, loaded, orig): + # The module re-uses another's PyModuleDef, with a different name. + assert orig is not loaded.module + assert orig.__name__ != loaded.name + self.assertNotEqual(loaded.module.__name__, loaded.name) + self.assertIs(loaded.snapshot.lookedup, loaded.module) + + def check_basic(self, loaded, expected_init_count): + # m_size == -1 + # The module loads fresh the first time and copies m_copy after. + snap = loaded.snapshot + self.assertIsNot(snap.state_initialized, None) + self.assertIsInstance(snap.init_count, int) + self.assertGreater(snap.init_count, 0) + self.assertEqual(snap.init_count, expected_init_count) + + def check_with_reinit(self, loaded): + # m_size >= 0 + # The module loads fresh every time. + pass + + def check_fresh(self, loaded): + """ + The module had not been loaded before (at least since fully reset). + """ + snap = loaded.snapshot + # The module's init func was run. + # A copy of the module's __dict__ was stored in def->m_base.m_copy. + # The previous m_copy was deleted first. + # _PyRuntime.imports.extensions was set. + self.assertEqual(snap.init_count, 1) + # The global state was initialized. + # The module attrs were initialized from that state. + self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + + def check_semi_fresh(self, loaded, base, prev): + """ + The module had been loaded before and then reset + (but the module global state wasn't). + """ + snap = loaded.snapshot + # The module's init func was run again. + # A copy of the module's __dict__ was stored in def->m_base.m_copy. + # The previous m_copy was deleted first. + # The module globals did not get reset. + self.assertNotEqual(snap.id, base.snapshot.id) + self.assertNotEqual(snap.id, prev.snapshot.id) + self.assertEqual(snap.init_count, prev.snapshot.init_count + 1) + # The global state was updated. + # The module attrs were initialized from that state. + self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + self.assertNotEqual(snap.state_initialized, + base.snapshot.state_initialized) + self.assertNotEqual(snap.state_initialized, + prev.snapshot.state_initialized) + + def check_copied(self, loaded, base): + """ + The module had been loaded before and never reset. + """ + snap = loaded.snapshot + # The module's init func was not run again. + # The interpreter copied m_copy, as set by the other interpreter, + # with objects owned by the other interpreter. + # The module globals did not get reset. + self.assertNotEqual(snap.id, base.snapshot.id) + self.assertEqual(snap.init_count, base.snapshot.init_count) + # The global state was not updated since the init func did not run. + # The module attrs were not directly initialized from that state. + # The state and module attrs still match the previous loading. 
+ self.assertEqual(snap.module._module_initialized, + snap.state_initialized) + self.assertEqual(snap.state_initialized, + base.snapshot.state_initialized) + + ######################### + # the tests + + def test_cleared_globals(self): + loaded = self.load(self.NAME) + _testsinglephase = loaded.module + init_before = _testsinglephase.state_initialized() + + _testsinglephase._clear_globals() + init_after = _testsinglephase.state_initialized() + init_count = _testsinglephase.initialized_count() + + self.assertGreater(init_before, 0) + self.assertEqual(init_after, 0) + self.assertEqual(init_count, -1) + + def test_variants(self): + # Exercise the most meaningful variants described in Python/import.c. + self.maxDiff = None + + # Check the "basic" module. + + name = self.NAME + expected_init_count = 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_direct(loaded) + self.check_basic(loaded, expected_init_count) + basic = loaded.module + + # Check its indirect variants. + + name = f'{self.NAME}_basic_wrapper' + self.add_module_cleanup(name) + expected_init_count += 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_indirect(loaded, basic) + self.check_basic(loaded, expected_init_count) + + # Currently PyState_AddModule() always replaces the cached module. + self.assertIs(basic.look_up_self(), loaded.module) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # The cached module shouldn't change after this point. + basic_lookedup = loaded.module + + # Check its direct variant. + + name = f'{self.NAME}_basic_copy' + self.add_module_cleanup(name) + expected_init_count += 1 + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.check_direct(loaded) + self.check_basic(loaded, expected_init_count) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # Check the non-basic variant that has no state. + + name = f'{self.NAME}_with_reinit' + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.assertIs(loaded.snapshot.state_initialized, None) + self.check_direct(loaded) + self.check_with_reinit(loaded) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + # Check the basic variant that has state. + + name = f'{self.NAME}_with_state' + self.add_module_cleanup(name) + with self.subTest(name): + loaded = self.load(name) + + self.check_common(loaded) + self.assertIsNot(loaded.snapshot.state_initialized, None) + self.check_direct(loaded) + self.check_with_reinit(loaded) + + # This should change the cached module for _testsinglephase. + self.assertIs(basic.look_up_self(), basic_lookedup) + self.assertEqual(basic.initialized_count(), expected_init_count) + + def test_basic_reloaded(self): + # m_copy is copied into the existing module object. + # Global state is not changed. 
+        self.maxDiff = None
+
+        for name in [
+            self.NAME,  # the "basic" module
+            f'{self.NAME}_basic_wrapper',  # the indirect variant
+            f'{self.NAME}_basic_copy',  # the direct variant
+        ]:
+            self.add_module_cleanup(name)
+            with self.subTest(name):
+                loaded = self.load(name)
+                reloaded = self.re_load(name, loaded.module)
+
+                self.check_common(loaded)
+                self.check_common(reloaded)
+
+                # Make sure the original __dict__ did not get replaced.
+                self.assertEqual(id(loaded.module.__dict__),
+                                 loaded.snapshot.ns_id)
+                self.assertEqual(loaded.snapshot.ns.__dict__,
+                                 loaded.module.__dict__)
+
+                self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+                self.assertEqual(reloaded.module.__name__,
+                                 reloaded.snapshot.ns.__name__)
+
+                self.assertIs(reloaded.module, loaded.module)
+                self.assertIs(reloaded.module.__dict__, loaded.module.__dict__)
+                # It only happens to be the same but that's good enough here.
+                # We really just want to verify that the re-loaded attrs
+                # didn't change.
+                self.assertIs(reloaded.snapshot.lookedup,
+                              loaded.snapshot.lookedup)
+                self.assertEqual(reloaded.snapshot.state_initialized,
+                                 loaded.snapshot.state_initialized)
+                self.assertEqual(reloaded.snapshot.init_count,
+                                 loaded.snapshot.init_count)
+
+                self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+    def test_with_reinit_reloaded(self):
+        # The module's m_init func is run again.
+        self.maxDiff = None
+
+        # Keep a reference around.
+        basic = self.load(self.NAME)
+
+        for name in [
+            f'{self.NAME}_with_reinit',  # m_size == 0
+            f'{self.NAME}_with_state',  # m_size > 0
+        ]:
+            self.add_module_cleanup(name)
+            with self.subTest(name):
+                loaded = self.load(name)
+                reloaded = self.re_load(name, loaded.module)
+
+                self.check_common(loaded)
+                self.check_common(reloaded)
+
+                # Make sure the original __dict__ did not get replaced.
+                self.assertEqual(id(loaded.module.__dict__),
+                                 loaded.snapshot.ns_id)
+                self.assertEqual(loaded.snapshot.ns.__dict__,
+                                 loaded.module.__dict__)
+
+                self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+                self.assertEqual(reloaded.module.__name__,
+                                 reloaded.snapshot.ns.__name__)
+
+                self.assertIsNot(reloaded.module, loaded.module)
+                self.assertNotEqual(reloaded.module.__dict__,
+                                    loaded.module.__dict__)
+                self.assertIs(reloaded.snapshot.lookedup, reloaded.module)
+                if loaded.snapshot.state_initialized is None:
+                    self.assertIs(reloaded.snapshot.state_initialized, None)
+                else:
+                    self.assertGreater(reloaded.snapshot.state_initialized,
+                                       loaded.snapshot.state_initialized)
+
+                self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+    # Currently, for every single-phase init module loaded
+    # in multiple interpreters, those interpreters share a
+    # PyModuleDef for that object, which can be a problem.
+    # Also, we test with a single-phase module that has global state,
+    # which is shared by all interpreters.
+
+    @requires_subinterpreters
+    def test_basic_multiple_interpreters_main_no_reset(self):
+        # without resetting; already loaded in main interpreter
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def may or may not be loaded already
+        #  * module def not in _PyRuntime.imports.extensions
+        #  * mod init func has not run yet (since reset, at least)
+        #  * m_copy not set (hasn't been loaded yet or already cleared)
+        #  * module's global state has not been initialized yet
+        #    (or already cleared)
+
+        main_loaded = self.load(self.NAME)
+        _testsinglephase = main_loaded.module
+        # Attrs set after loading are not in m_copy.
+        _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam'
+
+        self.check_common(main_loaded)
+        self.check_fresh(main_loaded)
+
+        interpid1 = self.add_subinterpreter()
+        interpid2 = self.add_subinterpreter()
+
+        # At this point:
+        #  * alive in 1 interpreter (main)
+        #  * module def in _PyRuntime.imports.extensions
+        #  * mod init func ran for the first time (since reset, at least)
+        #  * m_copy was copied from the main interpreter (was NULL)
+        #  * module's global state was initialized
+
+        # Use an interpreter that gets destroyed right away.
+        loaded = self.import_in_subinterp()
+        self.check_common(loaded)
+        self.check_copied(loaded, main_loaded)
+
+        # At this point:
+        #  * alive in 1 interpreter (main)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy is NULL (cleared when the interpreter was destroyed)
+        #    (was from main interpreter)
+        #  * module's global state was updated, not reset
+
+        # Use a subinterpreter that sticks around.
+        loaded = self.import_in_subinterp(interpid1)
+        self.check_common(loaded)
+        self.check_copied(loaded, main_loaded)
+
+        # At this point:
+        #  * alive in 2 interpreters (main, interp1)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy was copied from interp1
+        #  * module's global state was updated, not reset
+
+        # Use a subinterpreter while the previous one is still alive.
+        loaded = self.import_in_subinterp(interpid2)
+        self.check_common(loaded)
+        self.check_copied(loaded, main_loaded)
+
+        # At this point:
+        #  * alive in 3 interpreters (main, interp1, interp2)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy was copied from interp2 (was from interp1)
+        #  * module's global state was updated, not reset
+
+    @requires_subinterpreters
+    def test_basic_multiple_interpreters_deleted_no_reset(self):
+        # without resetting; already loaded in a deleted interpreter
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def may or may not be loaded already
+        #  * module def not in _PyRuntime.imports.extensions
+        #  * mod init func has not run yet (since reset, at least)
+        #  * m_copy not set (hasn't been loaded yet or already cleared)
+        #  * module's global state has not been initialized yet
+        #    (or already cleared)
+
+        interpid1 = self.add_subinterpreter()
+        interpid2 = self.add_subinterpreter()
+
+        # First, load in the main interpreter but then completely clear it.
+        loaded_main = self.load(self.NAME)
+        loaded_main.module._clear_globals()
+        _testinternalcapi.clear_extension(self.NAME, self.FILE)
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def loaded already
+        #  * module def was in _PyRuntime.imports.extensions, but cleared
+        #  * mod init func ran for the first time (since reset, at least)
+        #  * m_copy was set, but cleared (was NULL)
+        #  * module's global state was initialized but cleared
+
+        # Start with an interpreter that gets destroyed right away.
+        base = self.import_in_subinterp(postscript='''
+            # Attrs set after loading are not in m_copy.
+            mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+            ''')
+        self.check_common(base)
+        self.check_fresh(base)
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy is NULL (cleared when the interpreter was destroyed)
+        #  * module's global state was initialized, not reset
+
+        # Use a subinterpreter that sticks around.
+        loaded_interp1 = self.import_in_subinterp(interpid1)
+        self.check_common(loaded_interp1)
+        self.check_semi_fresh(loaded_interp1, loaded_main, base)
+
+        # At this point:
+        #  * alive in 1 interpreter (interp1)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy was copied from interp1 (was NULL)
+        #  * module's global state was updated, not reset
+
+        # Use a subinterpreter while the previous one is still alive.
+        loaded_interp2 = self.import_in_subinterp(interpid2)
+        self.check_common(loaded_interp2)
+        self.check_copied(loaded_interp2, loaded_interp1)
+
+        # At this point:
+        #  * alive in 2 interpreters (interp1, interp2)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy was copied from interp2 (was from interp1)
+        #  * module's global state was updated, not reset
+
+    @requires_subinterpreters
+    @requires_load_dynamic
+    def test_basic_multiple_interpreters_reset_each(self):
+        # resetting between each interpreter
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def may or may not be loaded already
+        #  * module def not in _PyRuntime.imports.extensions
+        #  * mod init func has not run yet (since reset, at least)
+        #  * m_copy not set (hasn't been loaded yet or already cleared)
+        #  * module's global state has not been initialized yet
+        #    (or already cleared)
+
+        interpid1 = self.add_subinterpreter()
+        interpid2 = self.add_subinterpreter()
+
+        # Use an interpreter that gets destroyed right away.
+        loaded = self.import_in_subinterp(
+            postscript='''
+            # Attrs set after loading are not in m_copy.
+            mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+            ''',
+            postcleanup=True,
+        )
+        self.check_common(loaded)
+        self.check_fresh(loaded)
+
+        # At this point:
+        #  * alive in 0 interpreters
+        #  * module def in _PyRuntime.imports.extensions
+        #  * mod init func ran for the first time (since reset, at least)
+        #  * m_copy is NULL (cleared when the interpreter was destroyed)
+        #  * module's global state was initialized, not reset
+
+        # Use a subinterpreter that sticks around.
+        loaded = self.import_in_subinterp(interpid1, postcleanup=True)
+        self.check_common(loaded)
+        self.check_fresh(loaded)
+
+        # At this point:
+        #  * alive in 1 interpreter (interp1)
+        #  * module def still in _PyRuntime.imports.extensions
+        #  * mod init func ran again
+        #  * m_copy was copied from interp1 (was NULL)
+        #  * module's global state was initialized, not reset
+
+        # Use a subinterpreter while the previous one is still alive.
+ loaded = self.import_in_subinterp(interpid2, postcleanup=True) + self.check_common(loaded) + self.check_fresh(loaded) + + # At this point: + # * alive in 2 interpreters (interp2, interp2) + # * module def still in _PyRuntime.imports.extensions + # * mod init func ran again + # * m_copy was copied from interp2 (was from interp1) + # * module's global state was initialized, not reset + + class ReloadTests(unittest.TestCase): """Very basic tests to make sure that imp.reload() operates just like diff --git a/Modules/_testsinglephase.c b/Modules/_testsinglephase.c index 565221c887e5ae..a16157702ae789 100644 --- a/Modules/_testsinglephase.c +++ b/Modules/_testsinglephase.c @@ -140,7 +140,7 @@ init_module(PyObject *module, module_state *state) if (initialized == NULL) { return -1; } - if (PyModule_AddObjectRef(module, "_initialized", initialized) != 0) { + if (PyModule_AddObjectRef(module, "_module_initialized", initialized) != 0) { return -1; } @@ -148,13 +148,13 @@ init_module(PyObject *module, module_state *state) } -PyDoc_STRVAR(common_initialized_doc, -"initialized()\n\ +PyDoc_STRVAR(common_state_initialized_doc, +"state_initialized()\n\ \n\ -Return the seconds-since-epoch when the module was initialized."); +Return the seconds-since-epoch when the module state was initialized."); static PyObject * -common_initialized(PyObject *self, PyObject *Py_UNUSED(ignored)) +common_state_initialized(PyObject *self, PyObject *Py_UNUSED(ignored)) { module_state *state = get_module_state(self); if (state == NULL) { @@ -164,9 +164,9 @@ common_initialized(PyObject *self, PyObject *Py_UNUSED(ignored)) return PyFloat_FromDouble(d); } -#define INITIALIZED_METHODDEF \ - {"initialized", common_initialized, METH_NOARGS, \ - common_initialized_doc} +#define STATE_INITIALIZED_METHODDEF \ + {"state_initialized", common_state_initialized, METH_NOARGS, \ + common_state_initialized_doc} PyDoc_STRVAR(common_look_up_self_doc, @@ -265,7 +265,7 @@ basic__clear_globals(PyObject *self, PyObject *Py_UNUSED(ignored)) static PyMethodDef TestMethods_Basic[] = { LOOK_UP_SELF_METHODDEF, SUM_METHODDEF, - INITIALIZED_METHODDEF, + STATE_INITIALIZED_METHODDEF, INITIALIZED_COUNT_METHODDEF, _CLEAR_GLOBALS_METHODDEF, {NULL, NULL} /* sentinel */ @@ -360,7 +360,7 @@ PyInit__testsinglephase_basic_copy(void) static PyMethodDef TestMethods_Reinit[] = { LOOK_UP_SELF_METHODDEF, SUM_METHODDEF, - INITIALIZED_METHODDEF, + STATE_INITIALIZED_METHODDEF, {NULL, NULL} /* sentinel */ }; @@ -421,7 +421,7 @@ PyInit__testsinglephase_with_reinit(void) static PyMethodDef TestMethods_WithState[] = { LOOK_UP_SELF_METHODDEF, SUM_METHODDEF, - INITIALIZED_METHODDEF, + STATE_INITIALIZED_METHODDEF, {NULL, NULL} /* sentinel */ }; diff --git a/Python/import.c b/Python/import.c index fabf03b1c5d698..57d4eea148810f 100644 --- a/Python/import.c +++ b/Python/import.c @@ -465,7 +465,7 @@ _modules_by_index_set(PyInterpreterState *interp, } static int -_modules_by_index_clear(PyInterpreterState *interp, PyModuleDef *def) +_modules_by_index_clear_one(PyInterpreterState *interp, PyModuleDef *def) { Py_ssize_t index = def->m_base.m_index; const char *err = _modules_by_index_check(interp, index); @@ -546,7 +546,7 @@ PyState_RemoveModule(PyModuleDef* def) "PyState_RemoveModule called on module with slots"); return -1; } - return _modules_by_index_clear(tstate->interp, def); + return _modules_by_index_clear_one(tstate->interp, def); } @@ -584,6 +584,109 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) /* extension modules */ /*********************/ +/* + It may help 
to have a big picture view of what happens + when an extension is loaded. This includes when it is imported + for the first time or via imp.load_dynamic(). + + Here's a summary, using imp.load_dynamic() as the starting point: + + 1. imp.load_dynamic() -> importlib._bootstrap._load() + 2. _load(): acquire import lock + 3. _load() -> importlib._bootstrap._load_unlocked() + 4. _load_unlocked() -> importlib._bootstrap.module_from_spec() + 5. module_from_spec() -> ExtensionFileLoader.create_module() + 6. create_module() -> _imp.create_dynamic() + (see below) + 7. module_from_spec() -> importlib._bootstrap._init_module_attrs() + 8. _load_unlocked(): sys.modules[name] = module + 9. _load_unlocked() -> ExtensionFileLoader.exec_module() + 10. exec_module() -> _imp.exec_dynamic() + (see below) + 11. _load(): release import lock + + + ...for single-phase init modules, where m_size == -1: + + (6). first time (not found in _PyRuntime.imports.extensions): + 1. _imp_create_dynamic_impl() -> import_find_extension() + 2. _imp_create_dynamic_impl() -> _PyImport_LoadDynamicModuleWithSpec() + 3. _PyImport_LoadDynamicModuleWithSpec(): load + 4. _PyImport_LoadDynamicModuleWithSpec(): call + 5. -> PyModule_Create() -> PyModule_Create2() -> PyModule_CreateInitialized() + 6. PyModule_CreateInitialized() -> PyModule_New() + 7. PyModule_CreateInitialized(): allocate mod->md_state + 8. PyModule_CreateInitialized() -> PyModule_AddFunctions() + 9. PyModule_CreateInitialized() -> PyModule_SetDocString() + 10. PyModule_CreateInitialized(): set mod->md_def + 11. : initialize the module + 12. _PyImport_LoadDynamicModuleWithSpec() -> _PyImport_CheckSubinterpIncompatibleExtensionAllowed() + 13. _PyImport_LoadDynamicModuleWithSpec(): set def->m_base.m_init + 14. _PyImport_LoadDynamicModuleWithSpec(): set __file__ + 15. _PyImport_LoadDynamicModuleWithSpec() -> _PyImport_FixupExtensionObject() + 16. _PyImport_FixupExtensionObject(): add it to interp->imports.modules_by_index + 17. _PyImport_FixupExtensionObject(): copy __dict__ into def->m_base.m_copy + 18. _PyImport_FixupExtensionObject(): add it to _PyRuntime.imports.extensions + + (6). subsequent times (found in _PyRuntime.imports.extensions): + 1. _imp_create_dynamic_impl() -> import_find_extension() + 2. import_find_extension() -> import_add_module() + 3. if name in sys.modules: use that module + 4. else: + 1. import_add_module() -> PyModule_NewObject() + 2. import_add_module(): set it on sys.modules + 5. import_find_extension(): copy the "m_copy" dict into __dict__ + 6. _imp_create_dynamic_impl() -> _PyImport_CheckSubinterpIncompatibleExtensionAllowed() + + (10). (every time): + 1. noop + + + ...for single-phase init modules, where m_size >= 0: + + (6). not main interpreter and never loaded there - every time (not found in _PyRuntime.imports.extensions): + 1-16. (same as for m_size == -1) + + (6). main interpreter - first time (not found in _PyRuntime.imports.extensions): + 1-16. (same as for m_size == -1) + 17. _PyImport_FixupExtensionObject(): add it to _PyRuntime.imports.extensions + + (6). previously loaded in main interpreter (found in _PyRuntime.imports.extensions): + 1. _imp_create_dynamic_impl() -> import_find_extension() + 2. import_find_extension(): call def->m_base.m_init + 3. import_find_extension(): add the module to sys.modules + + (10). every time: + 1. noop + + + ...for multi-phase init modules: + + (6). every time: + 1. _imp_create_dynamic_impl() -> import_find_extension() (not found) + 2. 
_imp_create_dynamic_impl() -> _PyImport_LoadDynamicModuleWithSpec() + 3. _PyImport_LoadDynamicModuleWithSpec(): load module init func + 4. _PyImport_LoadDynamicModuleWithSpec(): call module init func + 5. _PyImport_LoadDynamicModuleWithSpec() -> PyModule_FromDefAndSpec() + 6. PyModule_FromDefAndSpec(): gather/check moduledef slots + 7. if there's a Py_mod_create slot: + 1. PyModule_FromDefAndSpec(): call its function + 8. else: + 1. PyModule_FromDefAndSpec() -> PyModule_NewObject() + 9: PyModule_FromDefAndSpec(): set mod->md_def + 10. PyModule_FromDefAndSpec() -> _add_methods_to_object() + 11. PyModule_FromDefAndSpec() -> PyModule_SetDocString() + + (10). every time: + 1. _imp_exec_dynamic_impl() -> exec_builtin_or_dynamic() + 2. if mod->md_state == NULL (including if m_size == 0): + 1. exec_builtin_or_dynamic() -> PyModule_ExecDef() + 2. PyModule_ExecDef(): allocate mod->md_state + 3. if there's a Py_mod_exec slot: + 1. PyModule_ExecDef(): call its function + */ + + /* Make sure name is fully qualified. This is a bit of a hack: when the shared library is loaded, @@ -1007,13 +1110,17 @@ clear_singlephase_extension(PyInterpreterState *interp, /* Clear the PyState_*Module() cache entry. */ if (_modules_by_index_check(interp, def->m_base.m_index) == NULL) { - if (_modules_by_index_clear(interp, def) < 0) { + if (_modules_by_index_clear_one(interp, def) < 0) { return -1; } } /* Clear the cached module def. */ - return _extensions_cache_delete(filename, name); + if (_extensions_cache_delete(filename, name) < 0) { + return -1; + } + + return 0; } From 4f3786b7616dd464242b88ad6914053d409fe9d2 Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Mon, 27 Feb 2023 21:53:22 +0300 Subject: [PATCH 07/40] gh-101773: Optimize creation of Fractions in private methods (#101780) This PR adds a private `Fraction._from_coprime_ints` classmethod for internal creations of `Fraction` objects, replacing the use of `_normalize=False` in the existing constructor. This speeds up creation of `Fraction` objects arising from calculations. The `_normalize` argument to the `Fraction` constructor has been removed. Co-authored-by: Pieter Eendebak Co-authored-by: Mark Dickinson --- Lib/fractions.py | 79 +++++++++++-------- Lib/test/test_fractions.py | 1 + Lib/test/test_numeric_tower.py | 2 +- ...-02-10-11-59-13.gh-issue-101773.J_kI7y.rst | 2 + 4 files changed, 50 insertions(+), 34 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2023-02-10-11-59-13.gh-issue-101773.J_kI7y.rst diff --git a/Lib/fractions.py b/Lib/fractions.py index 49a3f2841a2ed4..f718b35639beee 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -183,7 +183,7 @@ class Fraction(numbers.Rational): __slots__ = ('_numerator', '_denominator') # We're immutable, so use __new__ not __init__ - def __new__(cls, numerator=0, denominator=None, *, _normalize=True): + def __new__(cls, numerator=0, denominator=None): """Constructs a Rational. 
Takes a string like '3/2' or '1.5', another Rational instance, a @@ -279,12 +279,11 @@ def __new__(cls, numerator=0, denominator=None, *, _normalize=True): if denominator == 0: raise ZeroDivisionError('Fraction(%s, 0)' % numerator) - if _normalize: - g = math.gcd(numerator, denominator) - if denominator < 0: - g = -g - numerator //= g - denominator //= g + g = math.gcd(numerator, denominator) + if denominator < 0: + g = -g + numerator //= g + denominator //= g self._numerator = numerator self._denominator = denominator return self @@ -301,7 +300,7 @@ def from_float(cls, f): elif not isinstance(f, float): raise TypeError("%s.from_float() only takes floats, not %r (%s)" % (cls.__name__, f, type(f).__name__)) - return cls(*f.as_integer_ratio()) + return cls._from_coprime_ints(*f.as_integer_ratio()) @classmethod def from_decimal(cls, dec): @@ -313,7 +312,19 @@ def from_decimal(cls, dec): raise TypeError( "%s.from_decimal() only takes Decimals, not %r (%s)" % (cls.__name__, dec, type(dec).__name__)) - return cls(*dec.as_integer_ratio()) + return cls._from_coprime_ints(*dec.as_integer_ratio()) + + @classmethod + def _from_coprime_ints(cls, numerator, denominator, /): + """Convert a pair of ints to a rational number, for internal use. + + The ratio of integers should be in lowest terms and the denominator + should be positive. + """ + obj = super(Fraction, cls).__new__(cls) + obj._numerator = numerator + obj._denominator = denominator + return obj def is_integer(self): """Return True if the Fraction is an integer.""" @@ -380,9 +391,9 @@ def limit_denominator(self, max_denominator=1000000): # the distance from p1/q1 to self is d/(q1*self._denominator). So we # need to compare 2*(q0+k*q1) with self._denominator/d. if 2*d*(q0+k*q1) <= self._denominator: - return Fraction(p1, q1, _normalize=False) + return Fraction._from_coprime_ints(p1, q1) else: - return Fraction(p0+k*p1, q0+k*q1, _normalize=False) + return Fraction._from_coprime_ints(p0+k*p1, q0+k*q1) @property def numerator(a): @@ -703,13 +714,13 @@ def _add(a, b): nb, db = b._numerator, b._denominator g = math.gcd(da, db) if g == 1: - return Fraction(na * db + da * nb, da * db, _normalize=False) + return Fraction._from_coprime_ints(na * db + da * nb, da * db) s = da // g t = na * (db // g) + nb * s g2 = math.gcd(t, g) if g2 == 1: - return Fraction(t, s * db, _normalize=False) - return Fraction(t // g2, s * (db // g2), _normalize=False) + return Fraction._from_coprime_ints(t, s * db) + return Fraction._from_coprime_ints(t // g2, s * (db // g2)) __add__, __radd__ = _operator_fallbacks(_add, operator.add) @@ -719,13 +730,13 @@ def _sub(a, b): nb, db = b._numerator, b._denominator g = math.gcd(da, db) if g == 1: - return Fraction(na * db - da * nb, da * db, _normalize=False) + return Fraction._from_coprime_ints(na * db - da * nb, da * db) s = da // g t = na * (db // g) - nb * s g2 = math.gcd(t, g) if g2 == 1: - return Fraction(t, s * db, _normalize=False) - return Fraction(t // g2, s * (db // g2), _normalize=False) + return Fraction._from_coprime_ints(t, s * db) + return Fraction._from_coprime_ints(t // g2, s * (db // g2)) __sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub) @@ -741,15 +752,17 @@ def _mul(a, b): if g2 > 1: nb //= g2 da //= g2 - return Fraction(na * nb, db * da, _normalize=False) + return Fraction._from_coprime_ints(na * nb, db * da) __mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul) def _div(a, b): """a / b""" # Same as _mul(), with inversed b. 
- na, da = a._numerator, a._denominator nb, db = b._numerator, b._denominator + if nb == 0: + raise ZeroDivisionError('Fraction(%s, 0)' % db) + na, da = a._numerator, a._denominator g1 = math.gcd(na, nb) if g1 > 1: na //= g1 @@ -761,7 +774,7 @@ def _div(a, b): n, d = na * db, nb * da if d < 0: n, d = -n, -d - return Fraction(n, d, _normalize=False) + return Fraction._from_coprime_ints(n, d) __truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv) @@ -798,17 +811,17 @@ def __pow__(a, b): if b.denominator == 1: power = b.numerator if power >= 0: - return Fraction(a._numerator ** power, - a._denominator ** power, - _normalize=False) - elif a._numerator >= 0: - return Fraction(a._denominator ** -power, - a._numerator ** -power, - _normalize=False) + return Fraction._from_coprime_ints(a._numerator ** power, + a._denominator ** power) + elif a._numerator > 0: + return Fraction._from_coprime_ints(a._denominator ** -power, + a._numerator ** -power) + elif a._numerator == 0: + raise ZeroDivisionError('Fraction(%s, 0)' % + a._denominator ** -power) else: - return Fraction((-a._denominator) ** -power, - (-a._numerator) ** -power, - _normalize=False) + return Fraction._from_coprime_ints((-a._denominator) ** -power, + (-a._numerator) ** -power) else: # A fractional power will generally produce an # irrational number. @@ -832,15 +845,15 @@ def __rpow__(b, a): def __pos__(a): """+a: Coerces a subclass instance to Fraction""" - return Fraction(a._numerator, a._denominator, _normalize=False) + return Fraction._from_coprime_ints(a._numerator, a._denominator) def __neg__(a): """-a""" - return Fraction(-a._numerator, a._denominator, _normalize=False) + return Fraction._from_coprime_ints(-a._numerator, a._denominator) def __abs__(a): """abs(a)""" - return Fraction(abs(a._numerator), a._denominator, _normalize=False) + return Fraction._from_coprime_ints(abs(a._numerator), a._denominator) def __int__(a, _index=operator.index): """int(a)""" diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 3bc6b409e05dc3..e112f49d2e7944 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -488,6 +488,7 @@ def testArithmetic(self): self.assertEqual(F(5, 6), F(2, 3) * F(5, 4)) self.assertEqual(F(1, 4), F(1, 10) / F(2, 5)) self.assertEqual(F(-15, 8), F(3, 4) / F(-2, 5)) + self.assertRaises(ZeroDivisionError, operator.truediv, F(1), F(0)) self.assertTypedEquals(2, F(9, 10) // F(2, 5)) self.assertTypedEquals(10**23, F(10**23, 1) // F(1)) self.assertEqual(F(5, 6), F(7, 3) % F(3, 2)) diff --git a/Lib/test/test_numeric_tower.py b/Lib/test/test_numeric_tower.py index 9cd85e13634c2b..337682d6bac96c 100644 --- a/Lib/test/test_numeric_tower.py +++ b/Lib/test/test_numeric_tower.py @@ -145,7 +145,7 @@ def test_fractions(self): # The numbers ABC doesn't enforce that the "true" division # of integers produces a float. This tests that the # Rational.__float__() method has required type conversions. 
- x = F(DummyIntegral(1), DummyIntegral(2), _normalize=False) + x = F._from_coprime_ints(DummyIntegral(1), DummyIntegral(2)) self.assertRaises(TypeError, lambda: x.numerator/x.denominator) self.assertEqual(float(x), 0.5) diff --git a/Misc/NEWS.d/next/Library/2023-02-10-11-59-13.gh-issue-101773.J_kI7y.rst b/Misc/NEWS.d/next/Library/2023-02-10-11-59-13.gh-issue-101773.J_kI7y.rst new file mode 100644 index 00000000000000..b577d93d28c2ae --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-10-11-59-13.gh-issue-101773.J_kI7y.rst @@ -0,0 +1,2 @@ +Optimize :class:`fractions.Fraction` for small components. The private argument +``_normalize`` of the :class:`fractions.Fraction` constructor has been removed. From 4624987b296108c2dc1e6e3a24e65d2de7afd451 Mon Sep 17 00:00:00 2001 From: Sergey B Kirpichev Date: Mon, 27 Feb 2023 22:11:28 +0300 Subject: [PATCH 08/40] gh-101825: Clarify that as_integer_ratio() output is always normalized (#101843) Make docstrings for `as_integer_ratio` consistent across types, and document that the returned pair is always normalized (coprime integers, with positive denominator). --------- Co-authored-by: Owain Davies <116417456+OTheDev@users.noreply.github.com> Co-authored-by: Mark Dickinson --- Doc/library/fractions.rst | 3 ++- Doc/library/stdtypes.rst | 6 +++--- Lib/fractions.py | 5 ++--- Objects/clinic/floatobject.c.h | 10 ++++------ Objects/clinic/longobject.c.h | 7 +++---- Objects/floatobject.c | 10 ++++------ Objects/longobject.c | 7 +++---- 7 files changed, 21 insertions(+), 27 deletions(-) diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index c61bbac892e0e4..fe2e8ab655edf8 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -118,7 +118,8 @@ another rational number, or from a string. .. method:: as_integer_ratio() Return a tuple of two integers, whose ratio is equal - to the Fraction and with a positive denominator. + to the original Fraction. The ratio is in lowest terms + and has a positive denominator. .. versionadded:: 3.8 diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 41947d66c15134..1240f80b0f11f0 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -602,8 +602,8 @@ class`. In addition, it provides a few more methods: .. method:: int.as_integer_ratio() - Return a pair of integers whose ratio is exactly equal to the original - integer and with a positive denominator. The integer ratio of integers + Return a pair of integers whose ratio is equal to the original + integer and has a positive denominator. The integer ratio of integers (whole numbers) is always the integer as the numerator and ``1`` as the denominator. @@ -624,7 +624,7 @@ class`. float also has the following additional methods. .. method:: float.as_integer_ratio() Return a pair of integers whose ratio is exactly equal to the - original float and with a positive denominator. Raises + original float. The ratio is in lowest terms and has a positive denominator. Raises :exc:`OverflowError` on infinities and a :exc:`ValueError` on NaNs. diff --git a/Lib/fractions.py b/Lib/fractions.py index f718b35639beee..c95db0730e5b6d 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -331,10 +331,9 @@ def is_integer(self): return self._denominator == 1 def as_integer_ratio(self): - """Return the integer ratio as a tuple. + """Return a pair of integers, whose ratio is equal to the original Fraction. - Return a tuple of two integers, whose ratio is equal to the - Fraction and with a positive denominator. 
+ The ratio is in lowest terms and has a positive denominator. """ return (self._numerator, self._denominator) diff --git a/Objects/clinic/floatobject.c.h b/Objects/clinic/floatobject.c.h index 6bc25a0a409f97..a99fd74e4b621b 100644 --- a/Objects/clinic/floatobject.c.h +++ b/Objects/clinic/floatobject.c.h @@ -173,12 +173,10 @@ PyDoc_STRVAR(float_as_integer_ratio__doc__, "as_integer_ratio($self, /)\n" "--\n" "\n" -"Return integer ratio.\n" +"Return a pair of integers, whose ratio is exactly equal to the original float.\n" "\n" -"Return a pair of integers, whose ratio is exactly equal to the original float\n" -"and with a positive denominator.\n" -"\n" -"Raise OverflowError on infinities and a ValueError on NaNs.\n" +"The ratio is in lowest terms and has a positive denominator. Raise\n" +"OverflowError on infinities and a ValueError on NaNs.\n" "\n" ">>> (10.0).as_integer_ratio()\n" "(10, 1)\n" @@ -327,4 +325,4 @@ float___format__(PyObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=74bc91bb44014df9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ea329577074911b9 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/longobject.c.h b/Objects/clinic/longobject.c.h index 206bffdd086a5c..c26ceafbc2be0d 100644 --- a/Objects/clinic/longobject.c.h +++ b/Objects/clinic/longobject.c.h @@ -231,10 +231,9 @@ PyDoc_STRVAR(int_as_integer_ratio__doc__, "as_integer_ratio($self, /)\n" "--\n" "\n" -"Return integer ratio.\n" +"Return a pair of integers, whose ratio is equal to the original int.\n" "\n" -"Return a pair of integers, whose ratio is exactly equal to the original int\n" -"and with a positive denominator.\n" +"The ratio is in lowest terms and has a positive denominator.\n" "\n" ">>> (10).as_integer_ratio()\n" "(10, 1)\n" @@ -485,4 +484,4 @@ int_is_integer(PyObject *self, PyObject *Py_UNUSED(ignored)) { return int_is_integer_impl(self); } -/*[clinic end generated code: output=e518fe2b5d519322 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=cfdf35d916158d4f input=a9049054013a1b77]*/ diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 912b742f797d24..d641311f1126cd 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1546,12 +1546,10 @@ float_fromhex(PyTypeObject *type, PyObject *string) /*[clinic input] float.as_integer_ratio -Return integer ratio. +Return a pair of integers, whose ratio is exactly equal to the original float. -Return a pair of integers, whose ratio is exactly equal to the original float -and with a positive denominator. - -Raise OverflowError on infinities and a ValueError on NaNs. +The ratio is in lowest terms and has a positive denominator. Raise +OverflowError on infinities and a ValueError on NaNs. >>> (10.0).as_integer_ratio() (10, 1) @@ -1563,7 +1561,7 @@ Raise OverflowError on infinities and a ValueError on NaNs. static PyObject * float_as_integer_ratio_impl(PyObject *self) -/*[clinic end generated code: output=65f25f0d8d30a712 input=e21d08b4630c2e44]*/ +/*[clinic end generated code: output=65f25f0d8d30a712 input=d5ba7765655d75bd]*/ { double self_double; double float_part; diff --git a/Objects/longobject.c b/Objects/longobject.c index 8293f133bed213..51655cd0bad9ec 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -6020,10 +6020,9 @@ int_bit_count_impl(PyObject *self) /*[clinic input] int.as_integer_ratio -Return integer ratio. +Return a pair of integers, whose ratio is equal to the original int. 
-Return a pair of integers, whose ratio is exactly equal to the original int -and with a positive denominator. +The ratio is in lowest terms and has a positive denominator. >>> (10).as_integer_ratio() (10, 1) @@ -6035,7 +6034,7 @@ and with a positive denominator. static PyObject * int_as_integer_ratio_impl(PyObject *self) -/*[clinic end generated code: output=e60803ae1cc8621a input=55ce3058e15de393]*/ +/*[clinic end generated code: output=e60803ae1cc8621a input=384ff1766634bec2]*/ { PyObject *ratio_tuple; PyObject *numerator = long_long(self); From 0f89acf6cc4d4790f7b7a82165d0a6e7e84e4b72 Mon Sep 17 00:00:00 2001 From: Steven Troxler Date: Mon, 27 Feb 2023 13:16:11 -0800 Subject: [PATCH 09/40] gh-101561: Add typing.override decorator (#101564) Co-authored-by: Jelle Zijlstra Co-authored-by: Alex Waygood --- Doc/library/typing.rst | 38 +++++++++++++++++ Doc/whatsnew/3.12.rst | 8 ++++ Lib/test/test_typing.py | 38 +++++++++++++++++ Lib/typing.py | 41 +++++++++++++++++++ Misc/ACKS | 1 + ...-02-04-16-35-46.gh-issue-101561.Xo6pIZ.rst | 1 + 6 files changed, 127 insertions(+) create mode 100644 Misc/NEWS.d/next/Library/2023-02-04-16-35-46.gh-issue-101561.Xo6pIZ.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index bbbf6920ddec88..3395e4bfb95c44 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -91,6 +91,8 @@ annotations. These include: *Introducing* :data:`LiteralString` * :pep:`681`: Data Class Transforms *Introducing* the :func:`@dataclass_transform` decorator +* :pep:`698`: Adding an override decorator to typing + *Introducing* the :func:`@override` decorator .. _type-aliases: @@ -2722,6 +2724,42 @@ Functions and decorators This wraps the decorator with something that wraps the decorated function in :func:`no_type_check`. + +.. decorator:: override + + A decorator for methods that indicates to type checkers that this method + should override a method or attribute with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed without + an equivalent change to a child class. + + For example:: + + class Base: + def log_status(self) + + class Sub(Base): + @override + def log_status(self) -> None: # Okay: overrides Base.log_status + ... + + @override + def done(self) -> None: # Error reported by type checker + ... + + There is no runtime checking of this property. + + The decorator will set the ``__override__`` attribute to ``True`` on + the decorated object. Thus, a check like + ``if getattr(obj, "__override__", False)`` can be used at runtime to determine + whether an object ``obj`` has been marked as an override. If the decorated object + does not support setting attributes, the decorator returns the object unchanged + without raising an exception. + + See :pep:`698` for more details. + + .. versionadded:: 3.12 + + .. decorator:: type_check_only Decorator to mark a class or function to be unavailable at runtime. diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index e551c5b4fd06a9..1a25ec6b70613b 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -350,6 +350,14 @@ tempfile The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter *delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.) +typing +------ + +* Add :func:`typing.override`, an override decorator telling to static type + checkers to verify that a method overrides some method or attribute of the + same name on a base class, as per :pep:`698`. (Contributed by Steven Troxler in + :gh:`101564`.) 
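
A minimal runtime sketch of the behaviour described above, assuming Python 3.12 where ``typing.override`` is available; the class and method names are placeholders borrowed from the documentation example earlier in this patch::

    from typing import override

    class Base:
        def log_status(self) -> None:
            print("base status")

    class Sub(Base):
        @override
        def log_status(self) -> None:  # a type checker verifies this overrides Base
            print("sub status")

    # At runtime the decorator only sets a marker attribute; nothing is validated.
    assert getattr(Sub().log_status, "__override__", False) is True
    assert getattr(Base().log_status, "__override__", False) is False

This mirrors the ``getattr(obj, "__override__", False)`` introspection pattern noted in the documentation added above.
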
+ sys --- diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 7a460d94469fe7..d61dc6e2fbd70b 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -23,6 +23,7 @@ from typing import assert_type, cast, runtime_checkable from typing import get_type_hints from typing import get_origin, get_args +from typing import override from typing import is_typeddict from typing import reveal_type from typing import dataclass_transform @@ -4166,6 +4167,43 @@ def cached(self): ... self.assertIs(True, Methods.cached.__final__) +class OverrideDecoratorTests(BaseTestCase): + def test_override(self): + class Base: + def normal_method(self): ... + @staticmethod + def static_method_good_order(): ... + @staticmethod + def static_method_bad_order(): ... + @staticmethod + def decorator_with_slots(): ... + + class Derived(Base): + @override + def normal_method(self): + return 42 + + @staticmethod + @override + def static_method_good_order(): + return 42 + + @override + @staticmethod + def static_method_bad_order(): + return 42 + + + self.assertIsSubclass(Derived, Base) + instance = Derived() + self.assertEqual(instance.normal_method(), 42) + self.assertIs(True, instance.normal_method.__override__) + self.assertEqual(Derived.static_method_good_order(), 42) + self.assertIs(True, Derived.static_method_good_order.__override__) + self.assertEqual(Derived.static_method_bad_order(), 42) + self.assertIs(False, hasattr(Derived.static_method_bad_order, "__override__")) + + class CastTests(BaseTestCase): def test_basics(self): diff --git a/Lib/typing.py b/Lib/typing.py index bdf51bb5f41595..8d40e923bb1d08 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -138,6 +138,7 @@ def _idfunc(_, x): 'NoReturn', 'NotRequired', 'overload', + 'override', 'ParamSpecArgs', 'ParamSpecKwargs', 'Required', @@ -2657,6 +2658,7 @@ class Other(Leaf): # Error reported by type checker # Internal type variable used for Type[]. CT_co = TypeVar('CT_co', covariant=True, bound=type) + # A useful type variable with constraints. This represents string types. # (This one *is* for export!) AnyStr = TypeVar('AnyStr', bytes, str) @@ -2748,6 +2750,8 @@ def new_user(user_class: Type[U]) -> U: At this point the type checker knows that joe has type BasicUser. """ +# Internal type variable for callables. Not for export. +F = TypeVar("F", bound=Callable[..., Any]) @runtime_checkable class SupportsInt(Protocol): @@ -3448,3 +3452,40 @@ def decorator(cls_or_fn): } return cls_or_fn return decorator + + + +def override(method: F, /) -> F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: ... + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method or attribute with the same name on a + base class. This helps prevent bugs that may occur when a base class is + changed without an equivalent change to a child class. + + There is no runtime checking of this property. The decorator sets the + ``__override__`` attribute to ``True`` on the decorated object to allow + runtime introspection. + + See PEP 698 for details. + + """ + try: + method.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. 
+ pass + return method diff --git a/Misc/ACKS b/Misc/ACKS index 3403aee4cc78ff..2da3d0ab29b81d 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1848,6 +1848,7 @@ Tom Tromey John Tromp Diane Trout Jason Trowbridge +Steven Troxler Brent Tubbs Anthony Tuininga Erno Tukia diff --git a/Misc/NEWS.d/next/Library/2023-02-04-16-35-46.gh-issue-101561.Xo6pIZ.rst b/Misc/NEWS.d/next/Library/2023-02-04-16-35-46.gh-issue-101561.Xo6pIZ.rst new file mode 100644 index 00000000000000..2f6a4153062e5a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-04-16-35-46.gh-issue-101561.Xo6pIZ.rst @@ -0,0 +1 @@ +Add a new decorator :func:`typing.override`. See :pep:`698` for details. Patch by Steven Troxler. From c41af812c948ec3cb07b2ef7a46a238f5cab3dc2 Mon Sep 17 00:00:00 2001 From: JosephSBoyle <48555120+JosephSBoyle@users.noreply.github.com> Date: Tue, 28 Feb 2023 06:11:52 +0000 Subject: [PATCH 10/40] IDLE: Simplify DynOptionsMenu __init__code (#101371) Refactor DynOptionMenu's initializer to not copy kwargs dict and use subscripting; improve its htest. Co-authored-by: Terry Jan Reedy --- Lib/idlelib/dynoption.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/Lib/idlelib/dynoption.py b/Lib/idlelib/dynoption.py index 9c6ffa435a1089..d5dfc3eda13f60 100644 --- a/Lib/idlelib/dynoption.py +++ b/Lib/idlelib/dynoption.py @@ -2,24 +2,19 @@ OptionMenu widget modified to allow dynamic menu reconfiguration and setting of highlightthickness """ -import copy - from tkinter import OptionMenu, _setit, StringVar, Button class DynOptionMenu(OptionMenu): - """ - unlike OptionMenu, our kwargs can include highlightthickness + """Add SetMenu and highlightthickness to OptionMenu. + + Highlightthickness adds space around menu button. """ def __init__(self, master, variable, value, *values, **kwargs): - # TODO copy value instead of whole dict - kwargsCopy=copy.copy(kwargs) - if 'highlightthickness' in list(kwargs.keys()): - del(kwargs['highlightthickness']) + highlightthickness = kwargs.pop('highlightthickness', None) OptionMenu.__init__(self, master, variable, value, *values, **kwargs) - self.config(highlightthickness=kwargsCopy.get('highlightthickness')) - #self.menu=self['menu'] - self.variable=variable - self.command=kwargs.get('command') + self['highlightthickness'] = highlightthickness + self.variable = variable + self.command = kwargs.get('command') def SetMenu(self,valueList,value=None): """ @@ -38,14 +33,15 @@ def _dyn_option_menu(parent): # htest # from tkinter import Toplevel # + StringVar, Button top = Toplevel(parent) - top.title("Tets dynamic option menu") + top.title("Test dynamic option menu") x, y = map(int, parent.geometry().split('+')[1:]) top.geometry("200x100+%d+%d" % (x + 250, y + 175)) top.focus_set() var = StringVar(top) var.set("Old option set") #Set the default value - dyn = DynOptionMenu(top,var, "old1","old2","old3","old4") + dyn = DynOptionMenu(top, var, "old1","old2","old3","old4", + highlightthickness=5) dyn.pack() def update(): @@ -54,5 +50,6 @@ def update(): button.pack() if __name__ == '__main__': + # Only module without unittests because of intention to replace. 
from idlelib.idle_test.htest import run run(_dyn_option_menu) From 6b2d7c0ddb4d2afe298ee7c8f6a23c400f1465fd Mon Sep 17 00:00:00 2001 From: Petr Viktorin Date: Tue, 28 Feb 2023 09:31:01 +0100 Subject: [PATCH 11/40] gh-101101: Unstable C API tier (PEP 689) (GH-101102) --- Doc/c-api/code.rst | 98 ++++++++++++++- Doc/c-api/stable.rst | 36 +++++- Doc/tools/extensions/c_annotations.py | 16 +++ Doc/whatsnew/3.12.rst | 23 ++++ Include/README.rst | 6 +- Include/cpython/ceval.h | 7 +- Include/cpython/code.h | 47 +++++-- Include/pyport.h | 9 ++ Lib/test/test_code.py | 6 +- ...2-04-21-17-25-22.gh-issue-91744.FgvaMi.rst | 3 + Modules/Setup.stdlib.in | 2 +- Modules/_testcapi/code.c | 115 ++++++++++++++++++ Modules/_testcapi/parts.h | 1 + Modules/_testcapimodule.c | 3 + Objects/codeobject.c | 9 +- PCbuild/_testcapi.vcxproj | 1 + PCbuild/_testcapi.vcxproj.filters | 3 + Python/ceval.c | 2 +- 18 files changed, 358 insertions(+), 29 deletions(-) create mode 100644 Misc/NEWS.d/next/C API/2022-04-21-17-25-22.gh-issue-91744.FgvaMi.rst create mode 100644 Modules/_testcapi/code.c diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst index ae75d68901d7ba..062ef3a1fea93c 100644 --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -33,28 +33,47 @@ bound into a function. Return the number of free variables in *co*. -.. c:function:: PyCodeObject* PyCode_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) +.. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) Return a new code object. If you need a dummy code object to create a frame, - use :c:func:`PyCode_NewEmpty` instead. Calling :c:func:`PyCode_New` directly - will bind you to a precise Python version since the definition of the bytecode - changes often. The many arguments of this function are inter-dependent in complex + use :c:func:`PyCode_NewEmpty` instead. + + Since the definition of the bytecode changes often, calling + :c:func:`PyCode_New` directly can bind you to a precise Python version. + + The many arguments of this function are inter-dependent in complex ways, meaning that subtle changes to values are likely to result in incorrect execution or VM crashes. Use this function only with extreme care. .. versionchanged:: 3.11 Added ``exceptiontable`` parameter. -.. c:function:: PyCodeObject* PyCode_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) + .. index:: single: PyCode_New + + .. versionchanged:: 3.12 + + Renamed from ``PyCode_New`` as part of :ref:`unstable-c-api`. + The old name is deprecated, but will remain available until the + signature changes again. + +.. 
c:function:: PyCodeObject* PyUnstable_Code_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) Similar to :c:func:`PyCode_New`, but with an extra "posonlyargcount" for positional-only arguments. The same caveats that apply to ``PyCode_New`` also apply to this function. - .. versionadded:: 3.8 + .. index:: single: PyCode_NewWithPosOnlyArgs + + .. versionadded:: 3.8 as ``PyCode_NewWithPosOnlyArgs`` .. versionchanged:: 3.11 Added ``exceptiontable`` parameter. + .. versionchanged:: 3.12 + + Renamed to ``PyUnstable_Code_NewWithPosOnlyArgs``. + The old name is deprecated, but will remain available until the + signature changes again. + .. c:function:: PyCodeObject* PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) Return a new empty code object with the specified filename, @@ -165,3 +184,70 @@ bound into a function. :c:func:`PyErr_WriteUnraisable`. Otherwise it should return ``0``. .. versionadded:: 3.12 + + +Extra information +----------------- + +To support low-level extensions to frame evaluation, such as external +just-in-time compilers, it is possible to attach arbitrary extra data to +code objects. + +These functions are part of the unstable C API tier: +this functionality is a CPython implementation detail, and the API +may change without deprecation warnings. + +.. c:function:: Py_ssize_t PyUnstable_Eval_RequestCodeExtraIndex(freefunc free) + + Return a new an opaque index value used to adding data to code objects. + + You generally call this function once (per interpreter) and use the result + with ``PyCode_GetExtra`` and ``PyCode_SetExtra`` to manipulate + data on individual code objects. + + If *free* is not ``NULL``: when a code object is deallocated, + *free* will be called on non-``NULL`` data stored under the new index. + Use :c:func:`Py_DecRef` when storing :c:type:`PyObject`. + + .. index:: single: _PyEval_RequestCodeExtraIndex + + .. versionadded:: 3.6 as ``_PyEval_RequestCodeExtraIndex`` + + .. versionchanged:: 3.12 + + Renamed to ``PyUnstable_Eval_RequestCodeExtraIndex``. + The old private name is deprecated, but will be available until the API + changes. + +.. c:function:: int PyUnstable_Code_GetExtra(PyObject *code, Py_ssize_t index, void **extra) + + Set *extra* to the extra data stored under the given index. + Return 0 on success. Set an exception and return -1 on failure. + + If no data was set under the index, set *extra* to ``NULL`` and return + 0 without setting an exception. + + .. index:: single: _PyCode_GetExtra + + .. versionadded:: 3.6 as ``_PyCode_GetExtra`` + + .. versionchanged:: 3.12 + + Renamed to ``PyUnstable_Code_GetExtra``. + The old private name is deprecated, but will be available until the API + changes. + +.. c:function:: int PyUnstable_Code_SetExtra(PyObject *code, Py_ssize_t index, void *extra) + + Set the extra data stored under the given index to *extra*. + Return 0 on success. Set an exception and return -1 on failure. + + .. index:: single: _PyCode_SetExtra + + .. versionadded:: 3.6 as ``_PyCode_SetExtra`` + + .. versionchanged:: 3.12 + + Renamed to ``PyUnstable_Code_SetExtra``. + The old private name is deprecated, but will be available until the API + changes. 
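
The test updated later in this patch (Lib/test/test_code.py) drives the same renamed entry points from Python through ctypes. As a rough, self-contained sketch of that pattern, assuming a CPython 3.12 build that exports the ``PyUnstable_*`` names; the function ``f``, the no-op free function, and the value ``100`` are arbitrary placeholders::

    import ctypes

    py = ctypes.pythonapi
    freefunc = ctypes.CFUNCTYPE(None, ctypes.c_voidp)

    RequestCodeExtraIndex = py.PyUnstable_Eval_RequestCodeExtraIndex
    RequestCodeExtraIndex.argtypes = (freefunc,)
    RequestCodeExtraIndex.restype = ctypes.c_ssize_t

    SetExtra = py.PyUnstable_Code_SetExtra
    SetExtra.argtypes = (ctypes.py_object, ctypes.c_ssize_t, ctypes.c_voidp)
    SetExtra.restype = ctypes.c_int

    GetExtra = py.PyUnstable_Code_GetExtra
    GetExtra.argtypes = (ctypes.py_object, ctypes.c_ssize_t,
                         ctypes.POINTER(ctypes.c_voidp))
    GetExtra.restype = ctypes.c_int

    @freefunc
    def myfree(ptr):
        # Called when a code object holding a non-NULL value is deallocated.
        pass

    def f():
        pass

    # Reserve one index per interpreter, then stash and read back a
    # pointer-sized value on f's code object under that index.
    index = RequestCodeExtraIndex(myfree)
    assert index >= 0
    assert SetExtra(f.__code__, index, ctypes.c_voidp(100)) == 0
    extra = ctypes.c_voidp()
    assert GetExtra(f.__code__, index, ctypes.byref(extra)) == 0
    assert extra.value == 100

The free function is kept at module level so the ctypes wrapper stays alive for as long as the interpreter may still call it.
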
diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst index 4ae20e93e36785..3721fc0697f5cd 100644 --- a/Doc/c-api/stable.rst +++ b/Doc/c-api/stable.rst @@ -6,9 +6,9 @@ C API Stability *************** -Python's C API is covered by the Backwards Compatibility Policy, :pep:`387`. -While the C API will change with every minor release (e.g. from 3.9 to 3.10), -most changes will be source-compatible, typically by only adding new API. +Unless documented otherwise, Python's C API is covered by the Backwards +Compatibility Policy, :pep:`387`. +Most changes to it are source-compatible (typically by only adding new API). Changing existing API or removing API is only done after a deprecation period or to fix serious issues. @@ -18,8 +18,38 @@ way; see :ref:`stable-abi-platform` below). So, code compiled for Python 3.10.0 will work on 3.10.8 and vice versa, but will need to be compiled separately for 3.9.x and 3.10.x. +There are two tiers of C API with different stability exepectations: + +- *Unstable API*, may change in minor versions without a deprecation period. + It is marked by the ``PyUnstable`` prefix in names. +- *Limited API*, is compatible across several minor releases. + When :c:macro:`Py_LIMITED_API` is defined, only this subset is exposed + from ``Python.h``. + +These are discussed in more detail below. + Names prefixed by an underscore, such as ``_Py_InternalState``, are private API that can change without notice even in patch releases. +If you need to use this API, consider reaching out to +`CPython developers `_ +to discuss adding public API for your use case. + +.. _unstable-c-api: + +Unstable C API +============== + +.. index:: single: PyUnstable + +Any API named with the ``PyUnstable`` prefix exposes CPython implementation +details, and may change in every minor release (e.g. from 3.9 to 3.10) without +any deprecation warnings. +However, it will not change in a bugfix release (e.g. from 3.10.0 to 3.10.1). + +It is generally intended for specialized, low-level tools like debuggers. + +Projects that use this API are expected to follow +CPython development and spend extra effort adjusting to changes. Stable Application Binary Interface diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index 9defb24a0331ef..e5dc82cf0dc2d9 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -143,6 +143,22 @@ def add_annotations(self, app, doctree): ' (Only some members are part of the stable ABI.)') node.insert(0, emph_node) + # Unstable API annotation. + if name.startswith('PyUnstable'): + warn_node = nodes.admonition( + classes=['unstable-c-api', 'warning']) + message = 'This is ' + emph_node = nodes.emphasis(message, message) + ref_node = addnodes.pending_xref( + 'Unstable API', refdomain="std", + reftarget='unstable-c-api', + reftype='ref', refexplicit="False") + ref_node += nodes.Text('Unstable API') + emph_node += ref_node + emph_node += nodes.Text('. It may change without warning in minor releases.') + warn_node += emph_node + node.insert(0, warn_node) + # Return value annotation if objtype != 'function': continue diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 1a25ec6b70613b..c0c021c679147f 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -810,6 +810,29 @@ C API Changes New Features ------------ + +* :pep:`697`: Introduced the :ref:`Unstable C API tier `, + intended for low-level tools like debuggers and JIT compilers. 
+ This API may change in each minor release of CPython without deprecation + warnings. + Its contents are marked by the ``PyUnstable_`` prefix in names. + + Code object constructors: + + - ``PyUnstable_Code_New()`` (renamed from ``PyCode_New``) + - ``PyUnstable_Code_NewWithPosOnlyArgs()`` (renamed from ``PyCode_NewWithPosOnlyArgs``) + + Extra storage for code objects (:pep:`523`): + + - ``PyUnstable_Eval_RequestCodeExtraIndex()`` (renamed from ``_PyEval_RequestCodeExtraIndex``) + - ``PyUnstable_Code_GetExtra()`` (renamed from ``_PyCode_GetExtra``) + - ``PyUnstable_Code_SetExtra()`` (renamed from ``_PyCode_SetExtra``) + + The original names will continue to be available until the respective + API changes. + + (Contributed by Petr Viktorin in :gh:`101101`.) + * Added the new limited C API function :c:func:`PyType_FromMetaclass`, which generalizes the existing :c:func:`PyType_FromModuleAndSpec` using an additional metaclass argument. diff --git a/Include/README.rst b/Include/README.rst index f52e690eac9a91..531f09692f783f 100644 --- a/Include/README.rst +++ b/Include/README.rst @@ -1,11 +1,13 @@ The Python C API ================ -The C API is divided into three sections: +The C API is divided into these sections: 1. ``Include/``: Limited API 2. ``Include/cpython/``: CPython implementation details -3. ``Include/internal/``: The internal API +3. ``Include/cpython/``, names with the ``PyUnstable_`` prefix: API that can + change between minor releases +4. ``Include/internal/``, and any name with ``_`` prefix: The internal API Information on changing the C API is available `in the developer guide`_ diff --git a/Include/cpython/ceval.h b/Include/cpython/ceval.h index 74665c9fa10580..0fbbee10c2edce 100644 --- a/Include/cpython/ceval.h +++ b/Include/cpython/ceval.h @@ -22,7 +22,12 @@ PyAPI_FUNC(PyObject *) _PyEval_EvalFrameDefault(PyThreadState *tstate, struct _P PyAPI_FUNC(void) _PyEval_SetSwitchInterval(unsigned long microseconds); PyAPI_FUNC(unsigned long) _PyEval_GetSwitchInterval(void); -PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc); +PyAPI_FUNC(Py_ssize_t) PyUnstable_Eval_RequestCodeExtraIndex(freefunc); +// Old name -- remove when this API changes: +_Py_DEPRECATED_EXTERNALLY(3.12) static inline Py_ssize_t +_PyEval_RequestCodeExtraIndex(freefunc f) { + return PyUnstable_Eval_RequestCodeExtraIndex(f); +} PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *); PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *); diff --git a/Include/cpython/code.h b/Include/cpython/code.h index fba9296aedc99d..0e4bd8a58c165b 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -178,19 +178,40 @@ static inline int PyCode_GetFirstFree(PyCodeObject *op) { #define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) #define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT)) -/* Public interface */ -PyAPI_FUNC(PyCodeObject *) PyCode_New( +/* Unstable public interface */ +PyAPI_FUNC(PyCodeObject *) PyUnstable_Code_New( int, int, int, int, int, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, int, PyObject *, PyObject *); -PyAPI_FUNC(PyCodeObject *) PyCode_NewWithPosOnlyArgs( +PyAPI_FUNC(PyCodeObject *) PyUnstable_Code_NewWithPosOnlyArgs( int, int, int, int, int, int, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *, int, PyObject *, PyObject *); /* same as struct above */ +// Old names -- remove when this API changes: 
+_Py_DEPRECATED_EXTERNALLY(3.12) static inline PyCodeObject * +PyCode_New( + int a, int b, int c, int d, int e, PyObject *f, PyObject *g, + PyObject *h, PyObject *i, PyObject *j, PyObject *k, + PyObject *l, PyObject *m, PyObject *n, int o, PyObject *p, + PyObject *q) +{ + return PyUnstable_Code_New( + a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q); +} +_Py_DEPRECATED_EXTERNALLY(3.12) static inline PyCodeObject * +PyCode_NewWithPosOnlyArgs( + int a, int poac, int b, int c, int d, int e, PyObject *f, PyObject *g, + PyObject *h, PyObject *i, PyObject *j, PyObject *k, + PyObject *l, PyObject *m, PyObject *n, int o, PyObject *p, + PyObject *q) +{ + return PyUnstable_Code_NewWithPosOnlyArgs( + a, poac, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q); +} /* Creates a new empty code object with the specified source location. */ PyAPI_FUNC(PyCodeObject *) @@ -269,11 +290,21 @@ PyAPI_FUNC(PyObject*) _PyCode_ConstantKey(PyObject *obj); PyAPI_FUNC(PyObject*) PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, PyObject *lnotab); - -PyAPI_FUNC(int) _PyCode_GetExtra(PyObject *code, Py_ssize_t index, - void **extra); -PyAPI_FUNC(int) _PyCode_SetExtra(PyObject *code, Py_ssize_t index, - void *extra); +PyAPI_FUNC(int) PyUnstable_Code_GetExtra( + PyObject *code, Py_ssize_t index, void **extra); +PyAPI_FUNC(int) PyUnstable_Code_SetExtra( + PyObject *code, Py_ssize_t index, void *extra); +// Old names -- remove when this API changes: +_Py_DEPRECATED_EXTERNALLY(3.12) static inline int +_PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) +{ + return PyUnstable_Code_GetExtra(code, index, extra); +} +_Py_DEPRECATED_EXTERNALLY(3.12) static inline int +_PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) +{ + return PyUnstable_Code_SetExtra(code, index, extra); +} /* Equivalent to getattr(code, 'co_code') in Python. Returns a strong reference to a bytes object. */ diff --git a/Include/pyport.h b/Include/pyport.h index 40092c2f81ad48..eef0fe1bfd71d8 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -323,6 +323,15 @@ extern "C" { #define Py_DEPRECATED(VERSION_UNUSED) #endif +// _Py_DEPRECATED_EXTERNALLY(version) +// Deprecated outside CPython core. 
+#ifdef Py_BUILD_CORE +#define _Py_DEPRECATED_EXTERNALLY(VERSION_UNUSED) +#else +#define _Py_DEPRECATED_EXTERNALLY(version) Py_DEPRECATED(version) +#endif + + #if defined(__clang__) #define _Py_COMP_DIAG_PUSH _Pragma("clang diagnostic push") #define _Py_COMP_DIAG_IGNORE_DEPR_DECLS \ diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py index 9c2ac83e1b69e3..0cd1fb3f9728e5 100644 --- a/Lib/test/test_code.py +++ b/Lib/test/test_code.py @@ -752,15 +752,15 @@ def f(): py = ctypes.pythonapi freefunc = ctypes.CFUNCTYPE(None,ctypes.c_voidp) - RequestCodeExtraIndex = py._PyEval_RequestCodeExtraIndex + RequestCodeExtraIndex = py.PyUnstable_Eval_RequestCodeExtraIndex RequestCodeExtraIndex.argtypes = (freefunc,) RequestCodeExtraIndex.restype = ctypes.c_ssize_t - SetExtra = py._PyCode_SetExtra + SetExtra = py.PyUnstable_Code_SetExtra SetExtra.argtypes = (ctypes.py_object, ctypes.c_ssize_t, ctypes.c_voidp) SetExtra.restype = ctypes.c_int - GetExtra = py._PyCode_GetExtra + GetExtra = py.PyUnstable_Code_GetExtra GetExtra.argtypes = (ctypes.py_object, ctypes.c_ssize_t, ctypes.POINTER(ctypes.c_voidp)) GetExtra.restype = ctypes.c_int diff --git a/Misc/NEWS.d/next/C API/2022-04-21-17-25-22.gh-issue-91744.FgvaMi.rst b/Misc/NEWS.d/next/C API/2022-04-21-17-25-22.gh-issue-91744.FgvaMi.rst new file mode 100644 index 00000000000000..20db25ddd0c1f6 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2022-04-21-17-25-22.gh-issue-91744.FgvaMi.rst @@ -0,0 +1,3 @@ +Introduced the *Unstable C API tier*, marking APi that is allowed to change +in minor releases without a deprecation period. +See :pep:`689` for details. diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 7551e5b349430e..b12290d436cbeb 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -169,7 +169,7 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/pytime.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. diff --git a/Modules/_testcapi/code.c b/Modules/_testcapi/code.c new file mode 100644 index 00000000000000..588dc67971ea5a --- /dev/null +++ b/Modules/_testcapi/code.c @@ -0,0 +1,115 @@ +#include "parts.h" + +static Py_ssize_t +get_code_extra_index(PyInterpreterState* interp) { + Py_ssize_t result = -1; + + static const char *key = "_testcapi.frame_evaluation.code_index"; + + PyObject *interp_dict = PyInterpreterState_GetDict(interp); // borrowed + assert(interp_dict); // real users would handle missing dict... 
somehow + + PyObject *index_obj = PyDict_GetItemString(interp_dict, key); // borrowed + Py_ssize_t index = 0; + if (!index_obj) { + if (PyErr_Occurred()) { + goto finally; + } + index = PyUnstable_Eval_RequestCodeExtraIndex(NULL); + if (index < 0 || PyErr_Occurred()) { + goto finally; + } + index_obj = PyLong_FromSsize_t(index); // strong ref + if (!index_obj) { + goto finally; + } + int res = PyDict_SetItemString(interp_dict, key, index_obj); + Py_DECREF(index_obj); + if (res < 0) { + goto finally; + } + } + else { + index = PyLong_AsSsize_t(index_obj); + if (index == -1 && PyErr_Occurred()) { + goto finally; + } + } + + result = index; +finally: + return result; +} + +static PyObject * +test_code_extra(PyObject* self, PyObject *Py_UNUSED(callable)) +{ + PyObject *result = NULL; + PyObject *test_module = NULL; + PyObject *test_func = NULL; + + // Get or initialize interpreter-specific code object storage index + PyInterpreterState *interp = PyInterpreterState_Get(); + if (!interp) { + return NULL; + } + Py_ssize_t code_extra_index = get_code_extra_index(interp); + if (PyErr_Occurred()) { + goto finally; + } + + // Get a function to test with + // This can be any Python function. Use `test.test_misc.testfunction`. + test_module = PyImport_ImportModule("test.test_capi.test_misc"); + if (!test_module) { + goto finally; + } + test_func = PyObject_GetAttrString(test_module, "testfunction"); + if (!test_func) { + goto finally; + } + PyObject *test_func_code = PyFunction_GetCode(test_func); // borrowed + if (!test_func_code) { + goto finally; + } + + // Check the value is initially NULL + void *extra; + int res = PyUnstable_Code_GetExtra(test_func_code, code_extra_index, &extra); + if (res < 0) { + goto finally; + } + assert (extra == NULL); + + // Set another code extra value + res = PyUnstable_Code_SetExtra(test_func_code, code_extra_index, (void*)(uintptr_t)77); + if (res < 0) { + goto finally; + } + // Assert it was set correctly + res = PyUnstable_Code_GetExtra(test_func_code, code_extra_index, &extra); + if (res < 0) { + goto finally; + } + assert ((uintptr_t)extra == 77); + + result = Py_NewRef(Py_None); +finally: + Py_XDECREF(test_module); + Py_XDECREF(test_func); + return result; +} + +static PyMethodDef TestMethods[] = { + {"test_code_extra", test_code_extra, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Code(PyObject *m) { + if (PyModule_AddFunctions(m, TestMethods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 1689f186b833f6..c8f31dc8e39fae 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -37,6 +37,7 @@ int _PyTestCapi_Init_Long(PyObject *module); int _PyTestCapi_Init_Float(PyObject *module); int _PyTestCapi_Init_Structmember(PyObject *module); int _PyTestCapi_Init_Exceptions(PyObject *module); +int _PyTestCapi_Init_Code(PyObject *module); #ifdef LIMITED_API_AVAILABLE int _PyTestCapi_Init_VectorcallLimited(PyObject *module); diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index fc716a3564d39a..10e507d6b481de 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -4083,6 +4083,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Exceptions(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Code(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, "LIMITED_API_AVAILABLE", Py_False); diff --git a/Objects/codeobject.c b/Objects/codeobject.c index a03b14edea8d4c..175bd57568f8f6 100644 --- a/Objects/codeobject.c +++ 
b/Objects/codeobject.c @@ -567,7 +567,8 @@ _PyCode_New(struct _PyCodeConstructor *con) ******************/ PyCodeObject * -PyCode_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, +PyUnstable_Code_NewWithPosOnlyArgs( + int argcount, int posonlyargcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, @@ -691,7 +692,7 @@ PyCode_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, } PyCodeObject * -PyCode_New(int argcount, int kwonlyargcount, +PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, @@ -1371,7 +1372,7 @@ typedef struct { int -_PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) +PyUnstable_Code_GetExtra(PyObject *code, Py_ssize_t index, void **extra) { if (!PyCode_Check(code)) { PyErr_BadInternalCall(); @@ -1392,7 +1393,7 @@ _PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) int -_PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) +PyUnstable_Code_SetExtra(PyObject *code, Py_ssize_t index, void *extra) { PyInterpreterState *interp = _PyInterpreterState_GET(); diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 742eb3ed2d9056..4cc184bfc1ac82 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -108,6 +108,7 @@ + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index ab5afc150c32f5..fbdaf04ce37cb1 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -54,6 +54,9 @@ Source Files + + Source Files + diff --git a/Python/ceval.c b/Python/ceval.c index b382d2109b93b7..5540c93d5e3dd7 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -2988,7 +2988,7 @@ format_awaitable_error(PyThreadState *tstate, PyTypeObject *type, int oparg) Py_ssize_t -_PyEval_RequestCodeExtraIndex(freefunc free) +PyUnstable_Eval_RequestCodeExtraIndex(freefunc free) { PyInterpreterState *interp = _PyInterpreterState_GET(); Py_ssize_t new_index; From 9f799ab0200f07d762eb8fe822c606e1b4b4866e Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Tue, 28 Feb 2023 11:29:32 +0000 Subject: [PATCH 12/40] gh-87092: Make jump target label equal to the offset of the target in the instructions sequence (#102093) --- Lib/test/support/bytecode_helper.py | 83 +++++++++----------- Lib/test/test_compiler_codegen.py | 8 +- Lib/test/test_peepholer.py | 13 ++-- Python/compile.c | 115 ++++++++++++++++------------ 4 files changed, 111 insertions(+), 108 deletions(-) diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index 65ae7a227baafe..190fe8723b1fb5 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -50,18 +50,13 @@ class CompilationStepTestCase(unittest.TestCase): HAS_TARGET = set(dis.hasjrel + dis.hasjabs + dis.hasexc) HAS_ARG_OR_TARGET = HAS_ARG.union(HAS_TARGET) - def setUp(self): - self.last_label = 0 - - def Label(self): - self.last_label += 1 - return self.last_label + class Label: + pass def assertInstructionsMatch(self, actual_, expected_): # get two lists where each entry is a label or - # an instruction tuple. Compare them, while mapping - # each actual label to a corresponding expected label - # based on their locations. 
+ # an instruction tuple. Normalize the labels to the + # instruction count of the target, and compare the lists. self.assertIsInstance(actual_, list) self.assertIsInstance(expected_, list) @@ -82,39 +77,35 @@ def assertInstructionsMatch(self, actual_, expected_): act = act[:len(exp)] self.assertEqual(exp, act) + def resolveAndRemoveLabels(self, insts): + idx = 0 + res = [] + for item in insts: + assert isinstance(item, (self.Label, tuple)) + if isinstance(item, self.Label): + item.value = idx + else: + idx += 1 + res.append(item) + + return res + def normalize_insts(self, insts): """ Map labels to instruction index. - Remove labels which are not used as jump targets. Map opcodes to opnames. """ - labels_map = {} - targets = set() - idx = 1 - for item in insts: - assert isinstance(item, (int, tuple)) - if isinstance(item, tuple): - opcode, oparg, *_ = item - if dis.opmap.get(opcode, opcode) in self.HAS_TARGET: - targets.add(oparg) - idx += 1 - elif isinstance(item, int): - assert item not in labels_map, "label reused" - labels_map[item] = idx - + insts = self.resolveAndRemoveLabels(insts) res = [] for item in insts: - if isinstance(item, int) and item in targets: - if not res or labels_map[item] != res[-1]: - res.append(labels_map[item]) - elif isinstance(item, tuple): - opcode, oparg, *loc = item - opcode = dis.opmap.get(opcode, opcode) - if opcode in self.HAS_TARGET: - arg = labels_map[oparg] - else: - arg = oparg if opcode in self.HAS_TARGET else None - opcode = dis.opname[opcode] - res.append((opcode, arg, *loc)) + assert isinstance(item, tuple) + opcode, oparg, *loc = item + opcode = dis.opmap.get(opcode, opcode) + if isinstance(oparg, self.Label): + arg = oparg.value + else: + arg = oparg if opcode in self.HAS_ARG else None + opcode = dis.opname[opcode] + res.append((opcode, arg, *loc)) return res @@ -129,20 +120,18 @@ class CfgOptimizationTestCase(CompilationStepTestCase): def complete_insts_info(self, insts): # fill in omitted fields in location, and oparg 0 for ops with no arg. 
- instructions = [] + res = [] for item in insts: - if isinstance(item, int): - instructions.append(item) - else: - assert isinstance(item, tuple) - inst = list(reversed(item)) - opcode = dis.opmap[inst.pop()] - oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 - loc = inst + [-1] * (4 - len(inst)) - instructions.append((opcode, oparg, *loc)) - return instructions + assert isinstance(item, tuple) + inst = list(reversed(item)) + opcode = dis.opmap[inst.pop()] + oparg = inst.pop() if opcode in self.HAS_ARG_OR_TARGET else 0 + loc = inst + [-1] * (4 - len(inst)) + res.append((opcode, oparg, *loc)) + return res def get_optimized(self, insts, consts): + insts = self.normalize_insts(insts) insts = self.complete_insts_info(insts) insts = optimize_cfg(insts, consts) return insts, consts diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py index f2e14c1e628c01..022753e0c99483 100644 --- a/Lib/test/test_compiler_codegen.py +++ b/Lib/test/test_compiler_codegen.py @@ -37,11 +37,11 @@ def test_for_loop(self): ('GET_ITER', None, 1), loop_lbl := self.Label(), ('FOR_ITER', exit_lbl := self.Label(), 1), - ('STORE_NAME', None, 1), + ('STORE_NAME', 1, 1), ('PUSH_NULL', None, 2), - ('LOAD_NAME', None, 2), - ('LOAD_NAME', None, 2), - ('CALL', None, 2), + ('LOAD_NAME', 2, 2), + ('LOAD_NAME', 1, 2), + ('CALL', 1, 2), ('POP_TOP', None), ('JUMP', loop_lbl), exit_lbl, diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index 707ff821b31a8a..aea234e38705a8 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -984,6 +984,7 @@ def cfg_optimization_test(self, insts, expected_insts, if expected_consts is None: expected_consts = consts opt_insts, opt_consts = self.get_optimized(insts, consts) + expected_insts = self.normalize_insts(expected_insts) self.assertInstructionsMatch(opt_insts, expected_insts) self.assertEqual(opt_consts, expected_consts) @@ -996,11 +997,11 @@ def test_conditional_jump_forward_non_const_condition(self): ('LOAD_CONST', 3, 14), ] expected = [ - ('LOAD_NAME', '1', 11), + ('LOAD_NAME', 1, 11), ('POP_JUMP_IF_TRUE', lbl := self.Label(), 12), - ('LOAD_CONST', '2', 13), + ('LOAD_CONST', 2, 13), lbl, - ('LOAD_CONST', '3', 14) + ('LOAD_CONST', 3, 14) ] self.cfg_optimization_test(insts, expected, consts=list(range(5))) @@ -1018,7 +1019,7 @@ def test_conditional_jump_forward_const_condition(self): expected = [ ('NOP', None, 11), ('NOP', None, 12), - ('LOAD_CONST', '3', 14) + ('LOAD_CONST', 3, 14) ] self.cfg_optimization_test(insts, expected, consts=list(range(5))) @@ -1031,9 +1032,9 @@ def test_conditional_jump_backward_non_const_condition(self): ] expected = [ lbl := self.Label(), - ('LOAD_NAME', '1', 11), + ('LOAD_NAME', 1, 11), ('POP_JUMP_IF_TRUE', lbl, 12), - ('LOAD_CONST', '2', 13) + ('LOAD_CONST', 2, 13) ] self.cfg_optimization_test(insts, expected, consts=list(range(5))) diff --git a/Python/compile.c b/Python/compile.c index 3f620beb0d0205..2f1130e62ee161 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -9704,56 +9704,77 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g) { assert(PyList_Check(instructions)); - Py_ssize_t instr_size = PyList_GET_SIZE(instructions); - for (Py_ssize_t i = 0; i < instr_size; i++) { + Py_ssize_t num_insts = PyList_GET_SIZE(instructions); + bool *is_target = PyMem_Calloc(num_insts, sizeof(bool)); + if (is_target == NULL) { + return ERROR; + } + for (Py_ssize_t i = 0; i < num_insts; i++) { PyObject *item = PyList_GET_ITEM(instructions, i); - if (PyLong_Check(item)) { - int 
lbl_id = PyLong_AsLong(item); + if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) { + PyErr_SetString(PyExc_ValueError, "expected a 6-tuple"); + goto error; + } + int opcode = PyLong_AsLong(PyTuple_GET_ITEM(item, 0)); + if (PyErr_Occurred()) { + goto error; + } + if (HAS_TARGET(opcode)) { + int oparg = PyLong_AsLong(PyTuple_GET_ITEM(item, 1)); if (PyErr_Occurred()) { - return ERROR; + goto error; } - if (lbl_id <= 0 || lbl_id > instr_size) { - /* expect label in a reasonable range */ + if (oparg < 0 || oparg >= num_insts) { PyErr_SetString(PyExc_ValueError, "label out of range"); - return ERROR; + goto error; } - jump_target_label lbl = {lbl_id}; + is_target[oparg] = true; + } + } + + for (int i = 0; i < num_insts; i++) { + if (is_target[i]) { + jump_target_label lbl = {i}; RETURN_IF_ERROR(cfg_builder_use_label(g, lbl)); } - else { - if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) { - PyErr_SetString(PyExc_ValueError, "expected a 6-tuple"); - return ERROR; - } - int opcode = PyLong_AsLong(PyTuple_GET_ITEM(item, 0)); - if (PyErr_Occurred()) { - return ERROR; - } - int oparg = PyLong_AsLong(PyTuple_GET_ITEM(item, 1)); - if (PyErr_Occurred()) { - return ERROR; - } - location loc; - loc.lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 2)); - if (PyErr_Occurred()) { - return ERROR; - } - loc.end_lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 3)); - if (PyErr_Occurred()) { - return ERROR; - } - loc.col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 4)); - if (PyErr_Occurred()) { - return ERROR; - } - loc.end_col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 5)); - if (PyErr_Occurred()) { - return ERROR; - } - RETURN_IF_ERROR(cfg_builder_addop(g, opcode, oparg, loc)); + PyObject *item = PyList_GET_ITEM(instructions, i); + if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) { + PyErr_SetString(PyExc_ValueError, "expected a 6-tuple"); + goto error; + } + int opcode = PyLong_AsLong(PyTuple_GET_ITEM(item, 0)); + if (PyErr_Occurred()) { + goto error; + } + int oparg = PyLong_AsLong(PyTuple_GET_ITEM(item, 1)); + if (PyErr_Occurred()) { + goto error; + } + location loc; + loc.lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 2)); + if (PyErr_Occurred()) { + goto error; } + loc.end_lineno = PyLong_AsLong(PyTuple_GET_ITEM(item, 3)); + if (PyErr_Occurred()) { + goto error; + } + loc.col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 4)); + if (PyErr_Occurred()) { + goto error; + } + loc.end_col_offset = PyLong_AsLong(PyTuple_GET_ITEM(item, 5)); + if (PyErr_Occurred()) { + goto error; + } + RETURN_IF_ERROR(cfg_builder_addop(g, opcode, oparg, loc)); } + + PyMem_Free(is_target); return SUCCESS; +error: + PyMem_Free(is_target); + return ERROR; } static PyObject * @@ -9763,20 +9784,12 @@ cfg_to_instructions(cfg_builder *g) if (instructions == NULL) { return NULL; } - int lbl = 1; + int lbl = 0; for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - b->b_label = lbl++; + b->b_label = lbl; + lbl += b->b_iused; } for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - PyObject *lbl = PyLong_FromLong(b->b_label); - if (lbl == NULL) { - goto error; - } - if (PyList_Append(instructions, lbl) != 0) { - Py_DECREF(lbl); - goto error; - } - Py_DECREF(lbl); for (int i = 0; i < b->b_iused; i++) { struct instr *instr = &b->b_instr[i]; location loc = instr->i_loc; From 85b1fc1fc5a2e49d521382eaf5e7793175d00371 Mon Sep 17 00:00:00 2001 From: Furkan Onder Date: Tue, 28 Feb 2023 11:43:00 +0000 Subject: [PATCH 13/40] GH-90744: Fix erroneous doc links in the sys module (#101319) Co-authored-by: 
Hugo van Kemenade --- Doc/library/sys.rst | 98 ++++++++++++++++++++++----------------------- 1 file changed, 49 insertions(+), 49 deletions(-) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 605e2c9a6710c1..23c5bbed0c6f9c 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -568,55 +568,55 @@ always available. .. tabularcolumns:: |l|l|L| - +---------------------+----------------+--------------------------------------------------+ - | attribute | float.h macro | explanation | - +=====================+================+==================================================+ - | :const:`epsilon` | DBL_EPSILON | difference between 1.0 and the least value | - | | | greater than 1.0 that is representable as a float| - | | | | - | | | See also :func:`math.ulp`. | - +---------------------+----------------+--------------------------------------------------+ - | :const:`dig` | DBL_DIG | maximum number of decimal digits that can be | - | | | faithfully represented in a float; see below | - +---------------------+----------------+--------------------------------------------------+ - | :const:`mant_dig` | DBL_MANT_DIG | float precision: the number of base-``radix`` | - | | | digits in the significand of a float | - +---------------------+----------------+--------------------------------------------------+ - | :const:`max` | DBL_MAX | maximum representable positive finite float | - +---------------------+----------------+--------------------------------------------------+ - | :const:`max_exp` | DBL_MAX_EXP | maximum integer *e* such that ``radix**(e-1)`` is| - | | | a representable finite float | - +---------------------+----------------+--------------------------------------------------+ - | :const:`max_10_exp` | DBL_MAX_10_EXP | maximum integer *e* such that ``10**e`` is in the| - | | | range of representable finite floats | - +---------------------+----------------+--------------------------------------------------+ - | :const:`min` | DBL_MIN | minimum representable positive *normalized* float| - | | | | - | | | Use :func:`math.ulp(0.0) ` to get the | - | | | smallest positive *denormalized* representable | - | | | float. | - +---------------------+----------------+--------------------------------------------------+ - | :const:`min_exp` | DBL_MIN_EXP | minimum integer *e* such that ``radix**(e-1)`` is| - | | | a normalized float | - +---------------------+----------------+--------------------------------------------------+ - | :const:`min_10_exp` | DBL_MIN_10_EXP | minimum integer *e* such that ``10**e`` is a | - | | | normalized float | - +---------------------+----------------+--------------------------------------------------+ - | :const:`radix` | FLT_RADIX | radix of exponent representation | - +---------------------+----------------+--------------------------------------------------+ - | :const:`rounds` | FLT_ROUNDS | integer representing the rounding mode for | - | | | floating-point arithmetic. This reflects the | - | | | value of the system FLT_ROUNDS macro at | - | | | interpreter startup time: | - | | | ``-1`` indeterminable, | - | | | ``0`` toward zero, | - | | | ``1`` to nearest, | - | | | ``2`` toward positive infinity, | - | | | ``3`` toward negative infinity | - | | | | - | | | All other values for FLT_ROUNDS characterize | - | | | implementation-defined rounding behavior. 
| - +---------------------+----------------+--------------------------------------------------+ + +---------------------+---------------------+--------------------------------------------------+ + | attribute | float.h macro | explanation | + +=====================+=====================+==================================================+ + | ``epsilon`` | ``DBL_EPSILON`` | difference between 1.0 and the least value | + | | | greater than 1.0 that is representable as a float| + | | | | + | | | See also :func:`math.ulp`. | + +---------------------+---------------------+--------------------------------------------------+ + | ``dig`` | ``DBL_DIG`` | maximum number of decimal digits that can be | + | | | faithfully represented in a float; see below | + +---------------------+---------------------+--------------------------------------------------+ + | ``mant_dig`` | ``DBL_MANT_DIG`` | float precision: the number of base-``radix`` | + | | | digits in the significand of a float | + +---------------------+---------------------+--------------------------------------------------+ + | ``max`` | ``DBL_MAX`` | maximum representable positive finite float | + +---------------------+---------------------+--------------------------------------------------+ + | ``max_exp`` | ``DBL_MAX_EXP`` | maximum integer *e* such that ``radix**(e-1)`` is| + | | | a representable finite float | + +---------------------+---------------------+--------------------------------------------------+ + | ``max_10_exp`` | ``DBL_MAX_10_EXP`` | maximum integer *e* such that ``10**e`` is in the| + | | | range of representable finite floats | + +---------------------+---------------------+--------------------------------------------------+ + | ``min`` | ``DBL_MIN`` | minimum representable positive *normalized* float| + | | | | + | | | Use :func:`math.ulp(0.0) ` to get the | + | | | smallest positive *denormalized* representable | + | | | float. | + +---------------------+---------------------+--------------------------------------------------+ + | ``min_exp`` | ``DBL_MIN_EXP`` | minimum integer *e* such that ``radix**(e-1)`` is| + | | | a normalized float | + +---------------------+---------------------+--------------------------------------------------+ + | ``min_10_exp`` | ``DBL_MIN_10_EXP`` | minimum integer *e* such that ``10**e`` is a | + | | | normalized float | + +---------------------+---------------------+--------------------------------------------------+ + | ``radix`` | ``FLT_RADIX`` | radix of exponent representation | + +---------------------+---------------------+--------------------------------------------------+ + | ``rounds`` | ``FLT_ROUNDS`` | integer representing the rounding mode for | + | | | floating-point arithmetic. This reflects the | + | | | value of the system ``FLT_ROUNDS`` macro at | + | | | interpreter startup time: | + | | | ``-1`` indeterminable, | + | | | ``0`` toward zero, | + | | | ``1`` to nearest, | + | | | ``2`` toward positive infinity, | + | | | ``3`` toward negative infinity | + | | | | + | | | All other values for ``FLT_ROUNDS`` characterize | + | | | implementation-defined rounding behavior. | + +---------------------+---------------------+--------------------------------------------------+ The attribute :attr:`sys.float_info.dig` needs further explanation. 
If ``s`` is any string representing a decimal number with at most From 4c87537efb5fd28b4e4ee9631076ed5953720156 Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Tue, 28 Feb 2023 11:50:52 +0000 Subject: [PATCH 14/40] gh-102192: Replace PyErr_Fetch/Restore etc by more efficient alternatives (in Python/) (#102193) --- Python/bytecodes.c | 2 +- Python/ceval.c | 49 +++++++++++++++------------------------------ Python/ceval_gil.c | 7 +++---- Python/compile.c | 5 ++--- Python/frame.c | 9 +++------ Python/initconfig.c | 2 +- Python/modsupport.c | 14 +++++-------- Python/sysmodule.c | 26 ++++++++++-------------- 8 files changed, 42 insertions(+), 72 deletions(-) diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 7e9b36f697210a..63dbecad3b45fc 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -17,7 +17,7 @@ #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject #include "pycore_opcode.h" // EXTRA_CASES -#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_range.h" // _PyRangeIterObject diff --git a/Python/ceval.c b/Python/ceval.c index 5540c93d5e3dd7..b422d0ed34ede3 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -13,7 +13,7 @@ #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject #include "pycore_opcode.h" // EXTRA_CASES -#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pyerrors.h" // _PyErr_Fetch(), _PyErr_GetRaisedException() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_range.h" // _PyRangeIterObject @@ -105,8 +105,7 @@ static void dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer) { PyObject **stack_base = _PyFrame_Stackbase(frame); - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); + PyObject *exc = PyErr_GetRaisedException(); printf(" stack=["); for (PyObject **ptr = stack_base; ptr < stack_pointer; ptr++) { if (ptr != stack_base) { @@ -120,7 +119,7 @@ dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer) } printf("]\n"); fflush(stdout); - PyErr_Restore(type, value, traceback); + PyErr_SetRaisedException(exc); } static void @@ -157,8 +156,7 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) return; } PyFunctionObject *f = (PyFunctionObject *)fobj; - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); + PyObject *exc = PyErr_GetRaisedException(); PyObject *name = f->func_qualname; if (name == NULL) { name = f->func_name; @@ -178,7 +176,7 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) } printf("\n"); fflush(stdout); - PyErr_Restore(type, value, traceback); + PyErr_SetRaisedException(exc); } #endif static int call_trace(Py_tracefunc, PyObject *, @@ -1032,7 +1030,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int PyObject *v = POP(); Py_XDECREF(v); } - PyObject *exc, *val, *tb; if (lasti) { int frame_lasti = _PyInterpreterFrame_LASTI(frame); PyObject *lasti = PyLong_FromLong(frame_lasti); @@ -1041,19 +1038,12 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } PUSH(lasti); } - _PyErr_Fetch(tstate, &exc, &val, &tb); + /* Make the raw exception data available to the handler, so a program can emulate the Python main loop. 
*/ - _PyErr_NormalizeException(tstate, &exc, &val, &tb); - if (tb != NULL) - PyException_SetTraceback(val, tb); - else - PyException_SetTraceback(val, Py_None); - Py_XDECREF(tb); - Py_XDECREF(exc); - PUSH(val); + PUSH(_PyErr_GetRaisedException(tstate)); JUMPTO(handler); /* Resume normal execution */ DISPATCH(); @@ -2075,19 +2065,15 @@ call_trace_protected(Py_tracefunc func, PyObject *obj, PyThreadState *tstate, _PyInterpreterFrame *frame, int what, PyObject *arg) { - PyObject *type, *value, *traceback; - int err; - _PyErr_Fetch(tstate, &type, &value, &traceback); - err = call_trace(func, obj, tstate, frame, what, arg); + PyObject *exc = _PyErr_GetRaisedException(tstate); + int err = call_trace(func, obj, tstate, frame, what, arg); if (err == 0) { - _PyErr_Restore(tstate, type, value, traceback); + _PyErr_SetRaisedException(tstate, exc); return 0; } else { - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(traceback); + Py_XDECREF(exc); return -1; } } @@ -2935,18 +2921,15 @@ format_exc_check_arg(PyThreadState *tstate, PyObject *exc, if (exc == PyExc_NameError) { // Include the name in the NameError exceptions to offer suggestions later. - PyObject *type, *value, *traceback; - PyErr_Fetch(&type, &value, &traceback); - PyErr_NormalizeException(&type, &value, &traceback); - if (PyErr_GivenExceptionMatches(value, PyExc_NameError)) { - PyNameErrorObject* exc = (PyNameErrorObject*) value; - if (exc->name == NULL) { + PyObject *exc = PyErr_GetRaisedException(); + if (PyErr_GivenExceptionMatches(exc, PyExc_NameError)) { + if (((PyNameErrorObject*)exc)->name == NULL) { // We do not care if this fails because we are going to restore the // NameError anyway. - (void)PyObject_SetAttr(value, &_Py_ID(name), obj); + (void)PyObject_SetAttr(exc, &_Py_ID(name), obj); } } - PyErr_Restore(type, value, traceback); + PyErr_SetRaisedException(exc); } } diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 1bf223348d28fa..749d8144bf7a23 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -2,7 +2,7 @@ #include "Python.h" #include "pycore_atomic.h" // _Py_atomic_int #include "pycore_ceval.h" // _PyEval_SignalReceived() -#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pylifecycle.h" // _PyErr_Print() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // _Py_RunGC() @@ -870,10 +870,9 @@ _Py_FinishPendingCalls(PyThreadState *tstate) } if (make_pending_calls(tstate->interp) < 0) { - PyObject *exc, *val, *tb; - _PyErr_Fetch(tstate, &exc, &val, &tb); + PyObject *exc = _PyErr_GetRaisedException(tstate); PyErr_BadInternalCall(); - _PyErr_ChainExceptions(exc, val, tb); + _PyErr_ChainExceptions1(exc); _PyErr_Print(tstate); } } diff --git a/Python/compile.c b/Python/compile.c index 2f1130e62ee161..b14c637210ff55 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -1633,8 +1633,7 @@ static void compiler_exit_scope(struct compiler *c) { // Don't call PySequence_DelItem() with an exception raised - PyObject *exc_type, *exc_val, *exc_tb; - PyErr_Fetch(&exc_type, &exc_val, &exc_tb); + PyObject *exc = PyErr_GetRaisedException(); c->c_nestlevel--; compiler_unit_free(c->u); @@ -1655,7 +1654,7 @@ compiler_exit_scope(struct compiler *c) c->u = NULL; } - PyErr_Restore(exc_type, exc_val, exc_tb); + PyErr_SetRaisedException(exc); } /* Search if variable annotations are present statically in a block. 
*/ diff --git a/Python/frame.c b/Python/frame.c index b562709ce10fee..c2c0be30113912 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -29,17 +29,14 @@ PyFrameObject * _PyFrame_MakeAndSetFrameObject(_PyInterpreterFrame *frame) { assert(frame->frame_obj == NULL); - PyObject *error_type, *error_value, *error_traceback; - PyErr_Fetch(&error_type, &error_value, &error_traceback); + PyObject *exc = PyErr_GetRaisedException(); PyFrameObject *f = _PyFrame_New_NoTrack(frame->f_code); if (f == NULL) { - Py_XDECREF(error_type); - Py_XDECREF(error_value); - Py_XDECREF(error_traceback); + Py_XDECREF(exc); return NULL; } - PyErr_Restore(error_type, error_value, error_traceback); + PyErr_SetRaisedException(exc); if (frame->frame_obj) { // GH-97002: How did we get into this horrible situation? Most likely, // allocating f triggered a GC collection, which ran some code that diff --git a/Python/initconfig.c b/Python/initconfig.c index deec805a6b1ca4..db7f11e17d6662 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -5,7 +5,7 @@ #include "pycore_interp.h" // _PyInterpreterState.runtime #include "pycore_long.h" // _PY_LONG_MAX_STR_DIGITS_THRESHOLD #include "pycore_pathconfig.h" // _Py_path_config -#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pylifecycle.h" // _Py_PreInitializeFromConfig() #include "pycore_pymem.h" // _PyMem_SetDefaultAllocator() #include "pycore_pystate.h" // _PyThreadState_GET() diff --git a/Python/modsupport.c b/Python/modsupport.c index b9a10dc157e7e5..75698455c88166 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -93,16 +93,12 @@ static PyObject *do_mkvalue(const char**, va_list *, int); static void do_ignore(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int flags) { - PyObject *v; - Py_ssize_t i; assert(PyErr_Occurred()); - v = PyTuple_New(n); - for (i = 0; i < n; i++) { - PyObject *exception, *value, *tb, *w; - - PyErr_Fetch(&exception, &value, &tb); - w = do_mkvalue(p_format, p_va, flags); - PyErr_Restore(exception, value, tb); + PyObject *v = PyTuple_New(n); + for (Py_ssize_t i = 0; i < n; i++) { + PyObject *exc = PyErr_GetRaisedException(); + PyObject *w = do_mkvalue(p_format, p_va, flags); + PyErr_SetRaisedException(exc); if (w != NULL) { if (v != NULL) { PyTuple_SET_ITEM(v, i, w); diff --git a/Python/sysmodule.c b/Python/sysmodule.c index b69b803560924c..207abb964bcac9 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -23,7 +23,7 @@ Data members: #include "pycore_namespace.h" // _PyNamespace_New() #include "pycore_object.h" // _PyObject_IS_GC() #include "pycore_pathconfig.h" // _PyPathConfig_ComputeSysPath0() -#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pylifecycle.h" // _PyErr_WriteUnraisableDefaultHook() #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR #include "pycore_pymem.h" // _PyMem_SetDefaultAllocator() @@ -89,12 +89,11 @@ PySys_GetObject(const char *name) { PyThreadState *tstate = _PyThreadState_GET(); - PyObject *exc_type, *exc_value, *exc_tb; - _PyErr_Fetch(tstate, &exc_type, &exc_value, &exc_tb); + PyObject *exc = _PyErr_GetRaisedException(tstate); PyObject *value = _PySys_GetObject(tstate->interp, name); /* XXX Suppress a new exception if it was raised and restore * the old one. 
*/ - _PyErr_Restore(tstate, exc_type, exc_value, exc_tb); + _PyErr_SetRaisedException(tstate, exc); return value; } @@ -203,8 +202,8 @@ sys_audit_tstate(PyThreadState *ts, const char *event, int dtrace = PyDTrace_AUDIT_ENABLED(); - PyObject *exc_type, *exc_value, *exc_tb; - _PyErr_Fetch(ts, &exc_type, &exc_value, &exc_tb); + + PyObject *exc = _PyErr_GetRaisedException(ts); /* Initialize event args now */ if (argFormat && argFormat[0]) { @@ -287,13 +286,11 @@ sys_audit_tstate(PyThreadState *ts, const char *event, Py_XDECREF(eventArgs); if (!res) { - _PyErr_Restore(ts, exc_type, exc_value, exc_tb); + _PyErr_SetRaisedException(ts, exc); } else { assert(_PyErr_Occurred(ts)); - Py_XDECREF(exc_type); - Py_XDECREF(exc_value); - Py_XDECREF(exc_tb); + Py_XDECREF(exc); } return res; @@ -3661,12 +3658,11 @@ static void sys_write(PyObject *key, FILE *fp, const char *format, va_list va) { PyObject *file; - PyObject *error_type, *error_value, *error_traceback; char buffer[1001]; int written; PyThreadState *tstate = _PyThreadState_GET(); - _PyErr_Fetch(tstate, &error_type, &error_value, &error_traceback); + PyObject *exc = _PyErr_GetRaisedException(tstate); file = _PySys_GetAttr(tstate, key); written = PyOS_vsnprintf(buffer, sizeof(buffer), format, va); if (sys_pyfile_write(buffer, file) != 0) { @@ -3678,7 +3674,7 @@ sys_write(PyObject *key, FILE *fp, const char *format, va_list va) if (sys_pyfile_write(truncated, file) != 0) fputs(truncated, fp); } - _PyErr_Restore(tstate, error_type, error_value, error_traceback); + _PyErr_SetRaisedException(tstate, exc); } void @@ -3708,7 +3704,7 @@ sys_format(PyObject *key, FILE *fp, const char *format, va_list va) const char *utf8; PyThreadState *tstate = _PyThreadState_GET(); - PyObject *error = _PyErr_GetRaisedException(tstate); + PyObject *exc = _PyErr_GetRaisedException(tstate); file = _PySys_GetAttr(tstate, key); message = PyUnicode_FromFormatV(format, va); if (message != NULL) { @@ -3720,7 +3716,7 @@ sys_format(PyObject *key, FILE *fp, const char *format, va_list va) } Py_DECREF(message); } - _PyErr_SetRaisedException(tstate, error); + _PyErr_SetRaisedException(tstate, exc); } void From e1a90ec75cd0f5cab21b467a73404081eaa1077f Mon Sep 17 00:00:00 2001 From: Ee Durbin Date: Tue, 28 Feb 2023 08:23:39 -0500 Subject: [PATCH 15/40] Migrate to new PSF mailgun account (#102284) Our legacy mailgun account is associated with a parent rackspace account that I am trying to decomission. The necessary secret has been added to the GitHub Actions Secrets already, so this is ready to go on approval. 
--- .github/workflows/new-bugs-announce-notifier.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/new-bugs-announce-notifier.yml b/.github/workflows/new-bugs-announce-notifier.yml index b2b63472d83421..b2a76ef7d36153 100644 --- a/.github/workflows/new-bugs-announce-notifier.yml +++ b/.github/workflows/new-bugs-announce-notifier.yml @@ -19,13 +19,13 @@ jobs: - name: Send notification uses: actions/github-script@v6 env: - MAILGUN_API_KEY: ${{ secrets.PSF_MAILGUN_KEY }} + MAILGUN_API_KEY: ${{ secrets.MAILGUN_PYTHON_ORG_MAILGUN_KEY }} with: script: | const Mailgun = require("mailgun.js"); const formData = require('form-data'); const mailgun = new Mailgun(formData); - const DOMAIN = "mg.python.org"; + const DOMAIN = "mailgun.python.org"; const mg = mailgun.client({username: 'api', key: process.env.MAILGUN_API_KEY}); github.rest.issues.get({ issue_number: context.issue.number, @@ -44,7 +44,7 @@ jobs: }; const data = { - from: "CPython Issues ", + from: "CPython Issues ", to: "new-bugs-announce@python.org", subject: `[Issue ${issue.data.number}] ${issue.data.title}`, template: "new-github-issue", From b5ff38243355c06d665ba8245d461a0d82504581 Mon Sep 17 00:00:00 2001 From: Guido van Rossum Date: Tue, 28 Feb 2023 08:49:35 -0800 Subject: [PATCH 16/40] GH-102305: Expand some macros in generated_cases.c.h (#102309) * Emit straight stack_pointer[-i] instead of PEEK(i), POKE(i, ...) * Expand JUMPBY() and NEXTOPARG(), and fix a perf bug --- Python/generated_cases.c.h | 845 ++++++++++++------------ Tools/cases_generator/generate_cases.py | 48 +- 2 files changed, 441 insertions(+), 452 deletions(-) diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 271ba26f489521..f59f7c17451c17 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -22,7 +22,7 @@ if (value == NULL) goto unbound_local_error; Py_INCREF(value); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } @@ -32,7 +32,7 @@ if (value == NULL) goto unbound_local_error; Py_INCREF(value); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } @@ -42,7 +42,7 @@ assert(value != NULL); Py_INCREF(value); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } @@ -52,12 +52,12 @@ value = GETITEM(consts, oparg); Py_INCREF(value); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } TARGET(STORE_FAST) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; SETLOCAL(oparg, value); STACK_SHRINK(1); DISPATCH(); @@ -73,8 +73,7 @@ Py_INCREF(value); _tmp_2 = value; } - NEXTOPARG(); - JUMPBY(1); + oparg = (next_instr++)->op.arg; { PyObject *value; value = GETLOCAL(oparg); @@ -83,8 +82,8 @@ _tmp_1 = value; } STACK_GROW(2); - POKE(1, _tmp_1); - POKE(2, _tmp_2); + stack_pointer[-1] = _tmp_1; + stack_pointer[-2] = _tmp_2; DISPATCH(); } @@ -98,8 +97,7 @@ Py_INCREF(value); _tmp_2 = value; } - NEXTOPARG(); - JUMPBY(1); + oparg = (next_instr++)->op.arg; { PyObject *value; value = GETITEM(consts, oparg); @@ -107,19 +105,18 @@ _tmp_1 = value; } STACK_GROW(2); - POKE(1, _tmp_1); - POKE(2, _tmp_2); + stack_pointer[-1] = _tmp_1; + stack_pointer[-2] = _tmp_2; DISPATCH(); } TARGET(STORE_FAST__LOAD_FAST) { - PyObject *_tmp_1 = PEEK(1); + PyObject *_tmp_1 = stack_pointer[-1]; { PyObject *value = _tmp_1; SETLOCAL(oparg, value); } - NEXTOPARG(); - JUMPBY(1); + oparg = (next_instr++)->op.arg; { PyObject *value; value = GETLOCAL(oparg); @@ -127,19 +124,18 @@ Py_INCREF(value); _tmp_1 = value; } - 
POKE(1, _tmp_1); + stack_pointer[-1] = _tmp_1; DISPATCH(); } TARGET(STORE_FAST__STORE_FAST) { - PyObject *_tmp_1 = PEEK(1); - PyObject *_tmp_2 = PEEK(2); + PyObject *_tmp_1 = stack_pointer[-1]; + PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; SETLOCAL(oparg, value); } - NEXTOPARG(); - JUMPBY(1); + oparg = (next_instr++)->op.arg; { PyObject *value = _tmp_2; SETLOCAL(oparg, value); @@ -157,8 +153,7 @@ Py_INCREF(value); _tmp_2 = value; } - NEXTOPARG(); - JUMPBY(1); + oparg = (next_instr++)->op.arg; { PyObject *value; value = GETLOCAL(oparg); @@ -167,13 +162,13 @@ _tmp_1 = value; } STACK_GROW(2); - POKE(1, _tmp_1); - POKE(2, _tmp_2); + stack_pointer[-1] = _tmp_1; + stack_pointer[-2] = _tmp_2; DISPATCH(); } TARGET(POP_TOP) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; Py_DECREF(value); STACK_SHRINK(1); DISPATCH(); @@ -183,13 +178,13 @@ PyObject *res; res = NULL; STACK_GROW(1); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(END_FOR) { - PyObject *_tmp_1 = PEEK(1); - PyObject *_tmp_2 = PEEK(2); + PyObject *_tmp_1 = stack_pointer[-1]; + PyObject *_tmp_2 = stack_pointer[-2]; { PyObject *value = _tmp_1; Py_DECREF(value); @@ -203,17 +198,17 @@ } TARGET(UNARY_NEGATIVE) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; PyObject *res; res = PyNumber_Negative(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(UNARY_NOT) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; PyObject *res; int err = PyObject_IsTrue(value); Py_DECREF(value); @@ -225,23 +220,23 @@ res = Py_False; } Py_INCREF(res); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(UNARY_INVERT) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; PyObject *res; res = PyNumber_Invert(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(BINARY_OP_MULTIPLY_INT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *prod; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); @@ -252,14 +247,14 @@ _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (prod == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, prod); - JUMPBY(1); + stack_pointer[-1] = prod; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_MULTIPLY_FLOAT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *prod; assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); @@ -272,14 +267,14 @@ _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); if (prod == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, prod); - JUMPBY(1); + stack_pointer[-1] = prod; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_SUBTRACT_INT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *sub; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); @@ -290,14 +285,14 @@ _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sub == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, sub); - JUMPBY(1); + stack_pointer[-1] = sub; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_SUBTRACT_FLOAT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = 
stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *sub; assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); @@ -309,14 +304,14 @@ _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); if (sub == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, sub); - JUMPBY(1); + stack_pointer[-1] = sub; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_ADD_UNICODE) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *res; assert(cframe.use_tracing == 0); DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); @@ -327,14 +322,14 @@ _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); - JUMPBY(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP); @@ -367,8 +362,8 @@ } TARGET(BINARY_OP_ADD_FLOAT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *sum; assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); @@ -381,14 +376,14 @@ _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc); if (sum == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, sum); - JUMPBY(1); + stack_pointer[-1] = sum; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_ADD_INT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *sum; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); @@ -399,16 +394,16 @@ _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (sum == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, sum); - JUMPBY(1); + stack_pointer[-1] = sum; + next_instr += 1; DISPATCH(); } TARGET(BINARY_SUBSCR) { PREDICTED(BINARY_SUBSCR); static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 4, "incorrect cache size"); - PyObject *sub = PEEK(1); - PyObject *container = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *container = stack_pointer[-2]; PyObject *res; #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; @@ -426,15 +421,15 @@ Py_DECREF(sub); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(BINARY_SLICE) { - PyObject *stop = PEEK(1); - PyObject *start = PEEK(2); - PyObject *container = PEEK(3); + PyObject *stop = stack_pointer[-1]; + PyObject *start = stack_pointer[-2]; + PyObject *container = stack_pointer[-3]; PyObject *res; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); // Can't use ERROR_IF() here, because we haven't @@ -449,15 +444,15 @@ Py_DECREF(container); if (res == NULL) goto pop_3_error; STACK_SHRINK(2); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(STORE_SLICE) { - PyObject *stop = PEEK(1); - PyObject *start = PEEK(2); - PyObject *container = PEEK(3); - PyObject *v = PEEK(4); + PyObject *stop = stack_pointer[-1]; + PyObject *start = stack_pointer[-2]; + PyObject *container = stack_pointer[-3]; + PyObject *v = stack_pointer[-4]; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); 
int err; if (slice == NULL) { @@ -475,8 +470,8 @@ } TARGET(BINARY_SUBSCR_LIST_INT) { - PyObject *sub = PEEK(1); - PyObject *list = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *list = stack_pointer[-2]; PyObject *res; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); @@ -494,14 +489,14 @@ _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(BINARY_SUBSCR_TUPLE_INT) { - PyObject *sub = PEEK(1); - PyObject *tuple = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *tuple = stack_pointer[-2]; PyObject *res; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); @@ -519,14 +514,14 @@ _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(tuple); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(BINARY_SUBSCR_DICT) { - PyObject *sub = PEEK(1); - PyObject *dict = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *dict = stack_pointer[-2]; PyObject *res; assert(cframe.use_tracing == 0); DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); @@ -544,14 +539,14 @@ Py_DECREF(dict); Py_DECREF(sub); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(BINARY_SUBSCR_GETITEM) { - PyObject *sub = PEEK(1); - PyObject *container = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *container = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t func_version = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(container); @@ -575,8 +570,8 @@ } TARGET(LIST_APPEND) { - PyObject *v = PEEK(1); - PyObject *list = PEEK(2 + (oparg-1)); + PyObject *v = stack_pointer[-1]; + PyObject *list = stack_pointer[-(2 + (oparg-1))]; if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; STACK_SHRINK(1); PREDICT(JUMP_BACKWARD); @@ -584,8 +579,8 @@ } TARGET(SET_ADD) { - PyObject *v = PEEK(1); - PyObject *set = PEEK(2 + (oparg-1)); + PyObject *v = stack_pointer[-1]; + PyObject *set = stack_pointer[-(2 + (oparg-1))]; int err = PySet_Add(set, v); Py_DECREF(v); if (err) goto pop_1_error; @@ -597,9 +592,9 @@ TARGET(STORE_SUBSCR) { PREDICTED(STORE_SUBSCR); static_assert(INLINE_CACHE_ENTRIES_STORE_SUBSCR == 1, "incorrect cache size"); - PyObject *sub = PEEK(1); - PyObject *container = PEEK(2); - PyObject *v = PEEK(3); + PyObject *sub = stack_pointer[-1]; + PyObject *container = stack_pointer[-2]; + PyObject *v = stack_pointer[-3]; uint16_t counter = read_u16(&next_instr[0].cache); #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { @@ -621,14 +616,14 @@ Py_DECREF(sub); if (err) goto pop_3_error; STACK_SHRINK(3); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(STORE_SUBSCR_LIST_INT) { - PyObject *sub = PEEK(1); - PyObject *list = PEEK(2); - PyObject *value = PEEK(3); + PyObject *sub = stack_pointer[-1]; + PyObject *list = stack_pointer[-2]; + PyObject *value = stack_pointer[-3]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); @@ -647,14 +642,14 @@ _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); Py_DECREF(list); STACK_SHRINK(3); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(STORE_SUBSCR_DICT) { - PyObject *sub = PEEK(1); - PyObject *dict = PEEK(2); - PyObject *value = PEEK(3); + PyObject *sub = stack_pointer[-1]; 
+ PyObject *dict = stack_pointer[-2]; + PyObject *value = stack_pointer[-3]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); @@ -662,13 +657,13 @@ Py_DECREF(dict); if (err) goto pop_3_error; STACK_SHRINK(3); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(DELETE_SUBSCR) { - PyObject *sub = PEEK(1); - PyObject *container = PEEK(2); + PyObject *sub = stack_pointer[-1]; + PyObject *container = stack_pointer[-2]; /* del container[sub] */ int err = PyObject_DelItem(container, sub); Py_DECREF(container); @@ -679,19 +674,19 @@ } TARGET(CALL_INTRINSIC_1) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; PyObject *res; assert(oparg <= MAX_INTRINSIC_1); res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); Py_DECREF(value); if (res == NULL) goto pop_1_error; - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(CALL_INTRINSIC_2) { - PyObject *value1 = PEEK(1); - PyObject *value2 = PEEK(2); + PyObject *value1 = stack_pointer[-1]; + PyObject *value2 = stack_pointer[-2]; PyObject *res; assert(oparg <= MAX_INTRINSIC_2); res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); @@ -699,12 +694,12 @@ Py_DECREF(value1); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(RAISE_VARARGS) { - PyObject **args = &PEEK(oparg); + PyObject **args = (stack_pointer - oparg); PyObject *cause = NULL, *exc = NULL; switch (oparg) { case 2: @@ -725,7 +720,7 @@ } TARGET(INTERPRETER_EXIT) { - PyObject *retval = PEEK(1); + PyObject *retval = stack_pointer[-1]; assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); STACK_SHRINK(1); // Since we're not going to DISPATCH() @@ -740,7 +735,7 @@ } TARGET(RETURN_VALUE) { - PyObject *retval = PEEK(1); + PyObject *retval = stack_pointer[-1]; STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -774,7 +769,7 @@ } TARGET(GET_AITER) { - PyObject *obj = PEEK(1); + PyObject *obj = stack_pointer[-1]; PyObject *iter; unaryfunc getter = NULL; PyTypeObject *type = Py_TYPE(obj); @@ -806,12 +801,12 @@ Py_DECREF(iter); if (true) goto pop_1_error; } - POKE(1, iter); + stack_pointer[-1] = iter; DISPATCH(); } TARGET(GET_ANEXT) { - PyObject *aiter = PEEK(1); + PyObject *aiter = stack_pointer[-1]; PyObject *awaitable; unaryfunc getter = NULL; PyObject *next_iter = NULL; @@ -857,14 +852,14 @@ } STACK_GROW(1); - POKE(1, awaitable); + stack_pointer[-1] = awaitable; PREDICT(LOAD_CONST); DISPATCH(); } TARGET(GET_AWAITABLE) { PREDICTED(GET_AWAITABLE); - PyObject *iterable = PEEK(1); + PyObject *iterable = stack_pointer[-1]; PyObject *iter; iter = _PyCoro_GetAwaitableIter(iterable); @@ -890,15 +885,15 @@ if (iter == NULL) goto pop_1_error; - POKE(1, iter); + stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); } TARGET(SEND) { PREDICTED(SEND); - PyObject *v = PEEK(1); - PyObject *receiver = PEEK(2); + PyObject *v = stack_pointer[-1]; + PyObject *receiver = stack_pointer[-2]; PyObject *retval; #if ENABLE_SPECIALIZATION _PySendCache *cache = (_PySendCache *)next_instr; @@ -935,14 +930,14 @@ assert(retval != NULL); } Py_DECREF(v); - POKE(1, retval); - JUMPBY(1); + stack_pointer[-1] = retval; + next_instr += 1; DISPATCH(); } TARGET(SEND_GEN) { - PyObject *v = PEEK(1); - PyObject *receiver = PEEK(2); + PyObject *v = stack_pointer[-1]; + PyObject *receiver = stack_pointer[-2]; assert(cframe.use_tracing == 0); PyGenObject *gen = (PyGenObject *)receiver; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type 
&& @@ -961,7 +956,7 @@ } TARGET(YIELD_VALUE) { - PyObject *retval = PEEK(1); + PyObject *retval = stack_pointer[-1]; // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. @@ -983,7 +978,7 @@ } TARGET(POP_EXCEPT) { - PyObject *exc_value = PEEK(1); + PyObject *exc_value = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, exc_value); STACK_SHRINK(1); @@ -991,8 +986,8 @@ } TARGET(RERAISE) { - PyObject *exc = PEEK(1); - PyObject **values = &PEEK(1 + oparg); + PyObject *exc = stack_pointer[-1]; + PyObject **values = (stack_pointer - (1 + oparg)); assert(oparg >= 0 && oparg <= 2); if (oparg) { PyObject *lasti = values[0]; @@ -1015,8 +1010,8 @@ } TARGET(END_ASYNC_FOR) { - PyObject *exc = PEEK(1); - PyObject *awaitable = PEEK(2); + PyObject *exc = stack_pointer[-1]; + PyObject *awaitable = stack_pointer[-2]; assert(exc && PyExceptionInstance_Check(exc)); if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) { Py_DECREF(awaitable); @@ -1034,9 +1029,9 @@ } TARGET(CLEANUP_THROW) { - PyObject *exc_value = PEEK(1); - PyObject *last_sent_val = PEEK(2); - PyObject *sub_iter = PEEK(3); + PyObject *exc_value = stack_pointer[-1]; + PyObject *last_sent_val = stack_pointer[-2]; + PyObject *sub_iter = stack_pointer[-3]; PyObject *none; PyObject *value; assert(throwflag); @@ -1053,8 +1048,8 @@ goto exception_unwind; } STACK_SHRINK(1); - POKE(1, value); - POKE(2, none); + stack_pointer[-1] = value; + stack_pointer[-2] = none; DISPATCH(); } @@ -1062,7 +1057,7 @@ PyObject *value; value = Py_NewRef(PyExc_AssertionError); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } @@ -1090,12 +1085,12 @@ } } STACK_GROW(1); - POKE(1, bc); + stack_pointer[-1] = bc; DISPATCH(); } TARGET(STORE_NAME) { - PyObject *v = PEEK(1); + PyObject *v = stack_pointer[-1]; PyObject *name = GETITEM(names, oparg); PyObject *ns = LOCALS(); int err; @@ -1138,7 +1133,7 @@ TARGET(UNPACK_SEQUENCE) { PREDICTED(UNPACK_SEQUENCE); static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); - PyObject *seq = PEEK(1); + PyObject *seq = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1156,12 +1151,12 @@ if (res == 0) goto pop_1_error; STACK_SHRINK(1); STACK_GROW(oparg); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { - PyObject *seq = PEEK(1); + PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); @@ -1172,12 +1167,12 @@ Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(UNPACK_SEQUENCE_TUPLE) { - PyObject *seq = PEEK(1); + PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); @@ -1189,12 +1184,12 @@ Py_DECREF(seq); STACK_SHRINK(1); STACK_GROW(oparg); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(UNPACK_SEQUENCE_LIST) { - PyObject *seq = PEEK(1); + PyObject *seq = stack_pointer[-1]; PyObject **values = stack_pointer - (1); DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); @@ -1206,12 +1201,12 @@ Py_DECREF(seq); 
STACK_SHRINK(1); STACK_GROW(oparg); - JUMPBY(1); + next_instr += 1; DISPATCH(); } TARGET(UNPACK_EX) { - PyObject *seq = PEEK(1); + PyObject *seq = stack_pointer[-1]; int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); @@ -1224,8 +1219,8 @@ TARGET(STORE_ATTR) { PREDICTED(STORE_ATTR); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); - PyObject *owner = PEEK(1); - PyObject *v = PEEK(2); + PyObject *owner = stack_pointer[-1]; + PyObject *v = stack_pointer[-2]; uint16_t counter = read_u16(&next_instr[0].cache); #if ENABLE_SPECIALIZATION if (ADAPTIVE_COUNTER_IS_ZERO(counter)) { @@ -1247,12 +1242,12 @@ Py_DECREF(owner); if (err) goto pop_2_error; STACK_SHRINK(2); - JUMPBY(4); + next_instr += 4; DISPATCH(); } TARGET(DELETE_ATTR) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *name = GETITEM(names, oparg); int err = PyObject_SetAttr(owner, name, (PyObject *)NULL); Py_DECREF(owner); @@ -1262,7 +1257,7 @@ } TARGET(STORE_GLOBAL) { - PyObject *v = PEEK(1); + PyObject *v = stack_pointer[-1]; PyObject *name = GETITEM(names, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); Py_DECREF(v); @@ -1347,7 +1342,7 @@ } } STACK_GROW(1); - POKE(1, v); + stack_pointer[-1] = v; DISPATCH(); } @@ -1410,9 +1405,9 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, v); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), null); } - JUMPBY(5); + stack_pointer[-1] = v; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } + next_instr += 5; DISPATCH(); } @@ -1434,9 +1429,9 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), null); } - JUMPBY(5); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } + next_instr += 5; DISPATCH(); } @@ -1462,9 +1457,9 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), null); } - JUMPBY(5); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = null; } + next_instr += 5; DISPATCH(); } @@ -1535,7 +1530,7 @@ Py_INCREF(value); } STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } @@ -1549,12 +1544,12 @@ } Py_INCREF(value); STACK_GROW(1); - POKE(1, value); + stack_pointer[-1] = value; DISPATCH(); } TARGET(STORE_DEREF) { - PyObject *v = PEEK(1); + PyObject *v = stack_pointer[-1]; PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); PyCell_SET(cell, v); @@ -1578,7 +1573,7 @@ } TARGET(BUILD_STRING) { - PyObject **pieces = &PEEK(oparg); + PyObject **pieces = (stack_pointer - oparg); PyObject *str; str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg); for (int i = 0; i < oparg; i++) { @@ -1587,35 +1582,35 @@ if (str == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); STACK_GROW(1); - POKE(1, str); + stack_pointer[-1] = str; DISPATCH(); } TARGET(BUILD_TUPLE) { - PyObject **values = &PEEK(oparg); + PyObject **values = (stack_pointer - oparg); PyObject *tup; tup = _PyTuple_FromArraySteal(values, oparg); if (tup == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); STACK_GROW(1); - POKE(1, tup); + stack_pointer[-1] = tup; DISPATCH(); } TARGET(BUILD_LIST) { - PyObject **values = &PEEK(oparg); + PyObject **values = (stack_pointer - oparg); PyObject *list; list = _PyList_FromArraySteal(values, oparg); if (list == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); STACK_GROW(1); - POKE(1, list); + stack_pointer[-1] = list; DISPATCH(); } TARGET(LIST_EXTEND) { - PyObject *iterable = PEEK(1); - PyObject *list = PEEK(2 + (oparg-1)); + PyObject *iterable = stack_pointer[-1]; + PyObject *list = stack_pointer[-(2 + (oparg-1))]; PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); if (none_val == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && @@ -1636,8 +1631,8 @@ } TARGET(SET_UPDATE) { - PyObject *iterable = PEEK(1); - PyObject *set = PEEK(2 + (oparg-1)); + PyObject *iterable = stack_pointer[-1]; + PyObject *set = stack_pointer[-(2 + (oparg-1))]; int err = _PySet_Update(set, iterable); Py_DECREF(iterable); if (err < 0) goto pop_1_error; @@ -1646,7 +1641,7 @@ } TARGET(BUILD_SET) { - PyObject **values = &PEEK(oparg); + PyObject **values = (stack_pointer - oparg); PyObject *set; set = PySet_New(NULL); if (set == NULL) @@ -1664,12 +1659,12 @@ } STACK_SHRINK(oparg); STACK_GROW(1); - POKE(1, set); + stack_pointer[-1] = set; DISPATCH(); } TARGET(BUILD_MAP) { - PyObject **values = &PEEK(oparg*2); + PyObject **values = (stack_pointer - oparg*2); PyObject *map; map = _PyDict_FromItems( values, 2, @@ -1685,7 +1680,7 @@ if (map == NULL) { STACK_SHRINK(oparg*2); goto error; } STACK_SHRINK(oparg*2); STACK_GROW(1); - POKE(1, map); + stack_pointer[-1] = map; DISPATCH(); } @@ -1733,8 +1728,8 @@ } TARGET(BUILD_CONST_KEY_MAP) { - PyObject *keys = PEEK(1); - PyObject **values = &PEEK(1 + oparg); + PyObject *keys = stack_pointer[-1]; + PyObject **values = (stack_pointer - (1 + oparg)); PyObject *map; if (!PyTuple_CheckExact(keys) || PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { @@ -1751,12 +1746,12 @@ } if (map == NULL) { STACK_SHRINK(oparg); goto pop_1_error; } STACK_SHRINK(oparg); - POKE(1, map); + stack_pointer[-1] = map; DISPATCH(); } TARGET(DICT_UPDATE) { - PyObject *update = PEEK(1); + PyObject *update = stack_pointer[-1]; PyObject *dict = PEEK(oparg + 1); // update is still on the stack if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { @@ -1773,7 +1768,7 @@ } TARGET(DICT_MERGE) { - 
PyObject *update = PEEK(1); + PyObject *update = stack_pointer[-1]; PyObject *dict = PEEK(oparg + 1); // update is still on the stack if (_PyDict_MergeEx(dict, update, 2) < 0) { @@ -1788,8 +1783,8 @@ } TARGET(MAP_ADD) { - PyObject *value = PEEK(1); - PyObject *key = PEEK(2); + PyObject *value = stack_pointer[-1]; + PyObject *key = stack_pointer[-2]; PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack assert(PyDict_CheckExact(dict)); /* dict[key] = value */ @@ -1803,7 +1798,7 @@ TARGET(LOAD_ATTR) { PREDICTED(LOAD_ATTR); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; #if ENABLE_SPECIALIZATION @@ -1853,14 +1848,14 @@ if (res == NULL) goto pop_1_error; } STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_INSTANCE_VALUE) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -1880,14 +1875,14 @@ res2 = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_MODULE) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -1907,14 +1902,14 @@ res2 = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_WITH_HINT) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -1948,14 +1943,14 @@ res2 = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_SLOT) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -1972,14 +1967,14 @@ res2 = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_CLASS) { - PyObject *cls = PEEK(1); + PyObject *cls = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -1998,14 +1993,14 @@ Py_INCREF(res); Py_DECREF(cls); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_PROPERTY) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *fget = read_obj(&next_instr[5].cache); @@ -2035,7 +2030,7 @@ } TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { - PyObject *owner = PEEK(1); + PyObject *owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *getattribute = read_obj(&next_instr[5].cache); @@ -2067,8 +2062,8 @@ } TARGET(STORE_ATTR_INSTANCE_VALUE) { - PyObject *owner = PEEK(1); - PyObject *value = PEEK(2); + PyObject *owner = stack_pointer[-1]; + PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); assert(cframe.use_tracing == 0); @@ -2090,13 +2085,13 @@ } Py_DECREF(owner); STACK_SHRINK(2); - JUMPBY(4); + next_instr += 4; DISPATCH(); } TARGET(STORE_ATTR_WITH_HINT) { - PyObject *owner = PEEK(1); - PyObject *value = PEEK(2); + PyObject *owner = stack_pointer[-1]; + PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t hint = read_u16(&next_instr[3].cache); assert(cframe.use_tracing == 0); @@ -2139,13 +2134,13 @@ dict->ma_version_tag = new_version; Py_DECREF(owner); STACK_SHRINK(2); - JUMPBY(4); + next_instr += 4; DISPATCH(); } TARGET(STORE_ATTR_SLOT) { - PyObject *owner = PEEK(1); - PyObject *value = PEEK(2); + PyObject *owner = stack_pointer[-1]; + PyObject *value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); assert(cframe.use_tracing == 0); @@ -2159,13 +2154,13 @@ Py_XDECREF(old_value); Py_DECREF(owner); STACK_SHRINK(2); - JUMPBY(4); + next_instr += 4; DISPATCH(); } TARGET(COMPARE_OP) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *res; STAT_INC(COMPARE_OP, deferred); assert((oparg >> 4) <= Py_GE); @@ -2174,15 +2169,15 @@ Py_DECREF(right); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); - JUMPBY(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(COMPARE_AND_BRANCH) { PREDICTED(COMPARE_AND_BRANCH); - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2210,13 +2205,13 @@ JUMPBY(offset); } STACK_SHRINK(2); - JUMPBY(2); + next_instr += 2; DISPATCH(); } TARGET(COMPARE_AND_BRANCH_FLOAT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_AND_BRANCH); DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_AND_BRANCH); @@ -2232,13 +2227,13 @@ JUMPBY(offset); } STACK_SHRINK(2); - JUMPBY(2); + next_instr += 2; DISPATCH(); } TARGET(COMPARE_AND_BRANCH_INT) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyLong_CheckExact(left), COMPARE_AND_BRANCH); DEOPT_IF(!PyLong_CheckExact(right), COMPARE_AND_BRANCH); @@ -2257,13 +2252,13 @@ JUMPBY(offset); } 
STACK_SHRINK(2); - JUMPBY(2); + next_instr += 2; DISPATCH(); } TARGET(COMPARE_AND_BRANCH_STR) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; assert(cframe.use_tracing == 0); DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_AND_BRANCH); DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_AND_BRANCH); @@ -2280,26 +2275,26 @@ JUMPBY(offset); } STACK_SHRINK(2); - JUMPBY(2); + next_instr += 2; DISPATCH(); } TARGET(IS_OP) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *b; int res = Py_Is(left, right) ^ oparg; Py_DECREF(left); Py_DECREF(right); b = Py_NewRef(res ? Py_True : Py_False); STACK_SHRINK(1); - POKE(1, b); + stack_pointer[-1] = b; DISPATCH(); } TARGET(CONTAINS_OP) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *b; int res = PySequence_Contains(right, left); Py_DECREF(left); @@ -2307,13 +2302,13 @@ if (res < 0) goto pop_2_error; b = Py_NewRef((res^oparg) ? Py_True : Py_False); STACK_SHRINK(1); - POKE(1, b); + stack_pointer[-1] = b; DISPATCH(); } TARGET(CHECK_EG_MATCH) { - PyObject *match_type = PEEK(1); - PyObject *exc_value = PEEK(2); + PyObject *match_type = stack_pointer[-1]; + PyObject *exc_value = stack_pointer[-2]; PyObject *rest; PyObject *match; if (check_except_star_type_valid(tstate, match_type) < 0) { @@ -2336,14 +2331,14 @@ if (!Py_IsNone(match)) { PyErr_SetExcInfo(NULL, Py_NewRef(match), NULL); } - POKE(1, match); - POKE(2, rest); + stack_pointer[-1] = match; + stack_pointer[-2] = rest; DISPATCH(); } TARGET(CHECK_EXC_MATCH) { - PyObject *right = PEEK(1); - PyObject *left = PEEK(2); + PyObject *right = stack_pointer[-1]; + PyObject *left = stack_pointer[-2]; PyObject *b; assert(PyExceptionInstance_Check(left)); if (check_except_type_valid(tstate, right) < 0) { @@ -2354,13 +2349,13 @@ int res = PyErr_GivenExceptionMatches(left, right); Py_DECREF(right); b = Py_NewRef(res ? 
Py_True : Py_False); - POKE(1, b); + stack_pointer[-1] = b; DISPATCH(); } TARGET(IMPORT_NAME) { - PyObject *fromlist = PEEK(1); - PyObject *level = PEEK(2); + PyObject *fromlist = stack_pointer[-1]; + PyObject *level = stack_pointer[-2]; PyObject *res; PyObject *name = GETITEM(names, oparg); res = import_name(tstate, frame, name, fromlist, level); @@ -2368,18 +2363,18 @@ Py_DECREF(fromlist); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(IMPORT_FROM) { - PyObject *from = PEEK(1); + PyObject *from = stack_pointer[-1]; PyObject *res; PyObject *name = GETITEM(names, oparg); res = import_from(tstate, from, name); if (res == NULL) goto error; STACK_GROW(1); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } @@ -2398,7 +2393,7 @@ TARGET(POP_JUMP_IF_FALSE) { PREDICTED(POP_JUMP_IF_FALSE); - PyObject *cond = PEEK(1); + PyObject *cond = stack_pointer[-1]; if (Py_IsTrue(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2421,7 +2416,7 @@ } TARGET(POP_JUMP_IF_TRUE) { - PyObject *cond = PEEK(1); + PyObject *cond = stack_pointer[-1]; if (Py_IsFalse(cond)) { _Py_DECREF_NO_DEALLOC(cond); } @@ -2444,7 +2439,7 @@ } TARGET(POP_JUMP_IF_NOT_NONE) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; if (!Py_IsNone(value)) { Py_DECREF(value); JUMPBY(oparg); @@ -2457,7 +2452,7 @@ } TARGET(POP_JUMP_IF_NONE) { - PyObject *value = PEEK(1); + PyObject *value = stack_pointer[-1]; if (Py_IsNone(value)) { _Py_DECREF_NO_DEALLOC(value); JUMPBY(oparg); @@ -2470,7 +2465,7 @@ } TARGET(JUMP_IF_FALSE_OR_POP) { - PyObject *cond = PEEK(1); + PyObject *cond = stack_pointer[-1]; bool jump = false; int err; if (Py_IsTrue(cond)) { @@ -2499,7 +2494,7 @@ } TARGET(JUMP_IF_TRUE_OR_POP) { - PyObject *cond = PEEK(1); + PyObject *cond = stack_pointer[-1]; bool jump = false; int err; if (Py_IsFalse(cond)) { @@ -2538,7 +2533,7 @@ } TARGET(GET_LEN) { - PyObject *obj = PEEK(1); + PyObject *obj = stack_pointer[-1]; PyObject *len_o; // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); @@ -2546,14 +2541,14 @@ len_o = PyLong_FromSsize_t(len_i); if (len_o == NULL) goto error; STACK_GROW(1); - POKE(1, len_o); + stack_pointer[-1] = len_o; DISPATCH(); } TARGET(MATCH_CLASS) { - PyObject *names = PEEK(1); - PyObject *type = PEEK(2); - PyObject *subject = PEEK(3); + PyObject *names = stack_pointer[-1]; + PyObject *type = stack_pointer[-2]; + PyObject *subject = stack_pointer[-3]; PyObject *attrs; // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. @@ -2570,57 +2565,57 @@ attrs = Py_NewRef(Py_None); // Failure! } STACK_SHRINK(2); - POKE(1, attrs); + stack_pointer[-1] = attrs; DISPATCH(); } TARGET(MATCH_MAPPING) { - PyObject *subject = PEEK(1); + PyObject *subject = stack_pointer[-1]; PyObject *res; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = Py_NewRef(match ? Py_True : Py_False); STACK_GROW(1); - POKE(1, res); + stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); DISPATCH(); } TARGET(MATCH_SEQUENCE) { - PyObject *subject = PEEK(1); + PyObject *subject = stack_pointer[-1]; PyObject *res; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = Py_NewRef(match ? 
Py_True : Py_False); STACK_GROW(1); - POKE(1, res); + stack_pointer[-1] = res; PREDICT(POP_JUMP_IF_FALSE); DISPATCH(); } TARGET(MATCH_KEYS) { - PyObject *keys = PEEK(1); - PyObject *subject = PEEK(2); + PyObject *keys = stack_pointer[-1]; + PyObject *subject = stack_pointer[-2]; PyObject *values_or_none; // On successful match, PUSH(values). Otherwise, PUSH(None). values_or_none = match_keys(tstate, subject, keys); if (values_or_none == NULL) goto error; STACK_GROW(1); - POKE(1, values_or_none); + stack_pointer[-1] = values_or_none; DISPATCH(); } TARGET(GET_ITER) { - PyObject *iterable = PEEK(1); + PyObject *iterable = stack_pointer[-1]; PyObject *iter; /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); Py_DECREF(iterable); if (iter == NULL) goto pop_1_error; - POKE(1, iter); + stack_pointer[-1] = iter; DISPATCH(); } TARGET(GET_YIELD_FROM_ITER) { - PyObject *iterable = PEEK(1); + PyObject *iterable = stack_pointer[-1]; PyObject *iter; /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { @@ -2646,7 +2641,7 @@ } Py_DECREF(iterable); } - POKE(1, iter); + stack_pointer[-1] = iter; PREDICT(LOAD_CONST); DISPATCH(); } @@ -2654,7 +2649,7 @@ TARGET(FOR_ITER) { PREDICTED(FOR_ITER); static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); - PyObject *iter = PEEK(1); + PyObject *iter = stack_pointer[-1]; PyObject *next; #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; @@ -2689,13 +2684,13 @@ } // Common case: no jump, leave it to the code generator STACK_GROW(1); - POKE(1, next); - JUMPBY(1); + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_LIST) { - PyObject *iter = PEEK(1); + PyObject *iter = stack_pointer[-1]; PyObject *next; assert(cframe.use_tracing == 0); DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); @@ -2718,13 +2713,13 @@ end_for_iter_list: // Common case: no jump, leave it to the code generator STACK_GROW(1); - POKE(1, next); - JUMPBY(1); + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_TUPLE) { - PyObject *iter = PEEK(1); + PyObject *iter = stack_pointer[-1]; PyObject *next; assert(cframe.use_tracing == 0); _PyTupleIterObject *it = (_PyTupleIterObject *)iter; @@ -2747,13 +2742,13 @@ end_for_iter_tuple: // Common case: no jump, leave it to the code generator STACK_GROW(1); - POKE(1, next); - JUMPBY(1); + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_RANGE) { - PyObject *iter = PEEK(1); + PyObject *iter = stack_pointer[-1]; PyObject *next; assert(cframe.use_tracing == 0); _PyRangeIterObject *r = (_PyRangeIterObject *)iter; @@ -2774,13 +2769,13 @@ goto error; } STACK_GROW(1); - POKE(1, next); - JUMPBY(1); + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_GEN) { - PyObject *iter = PEEK(1); + PyObject *iter = stack_pointer[-1]; assert(cframe.use_tracing == 0); PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); @@ -2798,7 +2793,7 @@ } TARGET(BEFORE_ASYNC_WITH) { - PyObject *mgr = PEEK(1); + PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); @@ -2831,14 +2826,14 @@ if (true) goto pop_1_error; } STACK_GROW(1); - POKE(1, res); - POKE(2, exit); + stack_pointer[-1] = res; + stack_pointer[-2] = exit; PREDICT(GET_AWAITABLE); DISPATCH(); } TARGET(BEFORE_WITH) { - PyObject *mgr = PEEK(1); + PyObject *mgr = stack_pointer[-1]; PyObject *exit; PyObject *res; /* pop the 
context manager, push its __exit__ and the @@ -2874,15 +2869,15 @@ if (true) goto pop_1_error; } STACK_GROW(1); - POKE(1, res); - POKE(2, exit); + stack_pointer[-1] = res; + stack_pointer[-2] = exit; DISPATCH(); } TARGET(WITH_EXCEPT_START) { - PyObject *val = PEEK(1); - PyObject *lasti = PEEK(3); - PyObject *exit_func = PEEK(4); + PyObject *val = stack_pointer[-1]; + PyObject *lasti = stack_pointer[-3]; + PyObject *exit_func = stack_pointer[-4]; PyObject *res; /* At the top of the stack are 4 values: - val: TOP = exc_info() @@ -2905,12 +2900,12 @@ 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); if (res == NULL) goto error; STACK_GROW(1); - POKE(1, res); + stack_pointer[-1] = res; DISPATCH(); } TARGET(PUSH_EXC_INFO) { - PyObject *new_exc = PEEK(1); + PyObject *new_exc = stack_pointer[-1]; PyObject *prev_exc; _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { @@ -2922,13 +2917,13 @@ assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = Py_NewRef(new_exc); STACK_GROW(1); - POKE(1, new_exc); - POKE(2, prev_exc); + stack_pointer[-1] = new_exc; + stack_pointer[-2] = prev_exc; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { - PyObject *self = PEEK(1); + PyObject *self = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -2952,14 +2947,14 @@ res = self; assert(oparg & 1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_NO_DICT) { - PyObject *self = PEEK(1); + PyObject *self = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -2975,14 +2970,14 @@ res = self; assert(oparg & 1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { - PyObject *self = PEEK(1); + PyObject *self = stack_pointer[-1]; PyObject *res2 = NULL; PyObject *res; uint32_t type_version = read_u32(&next_instr[1].cache); @@ -3002,9 +2997,9 @@ res = self; assert(oparg & 1); STACK_GROW(((oparg & 1) ? 1 : 0)); - POKE(1, res); - if (oparg & 1) { POKE(1 + ((oparg & 1) ? 1 : 0), res2); } - JUMPBY(9); + stack_pointer[-1] = res; + if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 
1 : 0))] = res2; } + next_instr += 9; DISPATCH(); } @@ -3018,9 +3013,9 @@ TARGET(CALL) { PREDICTED(CALL); static_assert(INLINE_CACHE_ENTRIES_CALL == 4, "incorrect cache size"); - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; int is_meth = method != NULL; int total_args = oparg; @@ -3095,15 +3090,15 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; DEOPT_IF(method != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); @@ -3117,9 +3112,9 @@ TARGET(CALL_PY_EXACT_ARGS) { PREDICTED(CALL_PY_EXACT_ARGS); - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); assert(kwnames == NULL); DEOPT_IF(tstate->interp->eval_frame, CALL); @@ -3148,9 +3143,9 @@ } TARGET(CALL_PY_WITH_DEFAULTS) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; uint32_t func_version = read_u32(&next_instr[1].cache); uint16_t min_args = read_u16(&next_instr[3].cache); assert(kwnames == NULL); @@ -3185,9 +3180,9 @@ } TARGET(CALL_NO_KW_TYPE_1) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *null = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); assert(cframe.use_tracing == 0); @@ -3201,15 +3196,15 @@ Py_DECREF(&PyType_Type); // I.e., callable STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(CALL_NO_KW_STR_1) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *null = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); assert(cframe.use_tracing == 0); @@ -3224,16 +3219,16 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_TUPLE_1) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *null = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *null = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); assert(oparg == 1); @@ -3247,16 +3242,16 @@ if (res == NULL) { STACK_SHRINK(oparg); goto 
pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_BUILTIN_CLASS) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; int is_meth = method != NULL; int total_args = oparg; @@ -3281,16 +3276,16 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_BUILTIN_O) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(cframe.use_tracing == 0); /* Builtin METH_O functions */ @@ -3322,16 +3317,16 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_BUILTIN_FAST) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(cframe.use_tracing == 0); /* Builtin METH_FASTCALL functions, without keywords */ @@ -3367,16 +3362,16 @@ */ STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(cframe.use_tracing == 0); /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ @@ -3412,16 +3407,16 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_LEN) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(cframe.use_tracing == 0); assert(kwnames == NULL); @@ -3450,15 +3445,15 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(CALL_NO_KW_ISINSTANCE) { - PyObject **args = &PEEK(oparg); - PyObject *callable = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *callable = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(cframe.use_tracing == 0); assert(kwnames == NULL); @@ -3489,15 +3484,15 @@ if (res == NULL) { 
STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; DISPATCH(); } TARGET(CALL_NO_KW_LIST_APPEND) { - PyObject **args = &PEEK(oparg); - PyObject *self = PEEK(1 + oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *self = stack_pointer[-(1 + oparg)]; + PyObject *method = stack_pointer[-(2 + oparg)]; assert(cframe.use_tracing == 0); assert(kwnames == NULL); assert(oparg == 1); @@ -3519,8 +3514,8 @@ } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { - PyObject **args = &PEEK(oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); int is_meth = method != NULL; @@ -3554,15 +3549,15 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { - PyObject **args = &PEEK(oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; int is_meth = method != NULL; int total_args = oparg; @@ -3594,15 +3589,15 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { - PyObject **args = &PEEK(oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); assert(oparg == 0 || oparg == 1); @@ -3634,15 +3629,15 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { - PyObject **args = &PEEK(oparg); - PyObject *method = PEEK(2 + oparg); + PyObject **args = (stack_pointer - oparg); + PyObject *method = stack_pointer[-(2 + oparg)]; PyObject *res; assert(kwnames == NULL); int is_meth = method != NULL; @@ -3673,17 +3668,17 @@ if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; } STACK_SHRINK(oparg); STACK_SHRINK(1); - POKE(1, res); - JUMPBY(4); + stack_pointer[-1] = res; + next_instr += 4; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(CALL_FUNCTION_EX) { PREDICTED(CALL_FUNCTION_EX); - PyObject *kwargs = (oparg & 1) ? PEEK(((oparg & 1) ? 1 : 0)) : NULL; - PyObject *callargs = PEEK(1 + ((oparg & 1) ? 1 : 0)); - PyObject *func = PEEK(2 + ((oparg & 1) ? 1 : 0)); + PyObject *kwargs = (oparg & 1) ? stack_pointer[-(((oparg & 1) ? 1 : 0))] : NULL; + PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; + PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))]; PyObject *result; if (oparg & 1) { // DICT_MERGE is called before this opcode if there are kwargs. @@ -3711,17 +3706,17 @@ if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } STACK_SHRINK(((oparg & 1) ? 1 : 0)); STACK_SHRINK(2); - POKE(1, result); + stack_pointer[-1] = result; CHECK_EVAL_BREAKER(); DISPATCH(); } TARGET(MAKE_FUNCTION) { - PyObject *codeobj = PEEK(1); - PyObject *closure = (oparg & 0x08) ? PEEK(1 + ((oparg & 0x08) ? 
1 : 0)) : NULL; - PyObject *annotations = (oparg & 0x04) ? PEEK(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0)) : NULL; - PyObject *kwdefaults = (oparg & 0x02) ? PEEK(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0)) : NULL; - PyObject *defaults = (oparg & 0x01) ? PEEK(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0)) : NULL; + PyObject *codeobj = stack_pointer[-1]; + PyObject *closure = (oparg & 0x08) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0))] : NULL; + PyObject *annotations = (oparg & 0x04) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0))] : NULL; + PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL; + PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0))] : NULL; PyObject *func; PyFunctionObject *func_obj = (PyFunctionObject *) @@ -3752,7 +3747,7 @@ func_obj->func_version = ((PyCodeObject *)codeobj)->co_version; func = (PyObject *)func_obj; STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 1 : 0)); - POKE(1, func); + stack_pointer[-1] = func; DISPATCH(); } @@ -3780,9 +3775,9 @@ } TARGET(BUILD_SLICE) { - PyObject *step = (oparg == 3) ? PEEK(((oparg == 3) ? 1 : 0)) : NULL; - PyObject *stop = PEEK(1 + ((oparg == 3) ? 1 : 0)); - PyObject *start = PEEK(2 + ((oparg == 3) ? 1 : 0)); + PyObject *step = (oparg == 3) ? stack_pointer[-(((oparg == 3) ? 1 : 0))] : NULL; + PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; + PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; PyObject *slice; slice = PySlice_New(start, stop, step); Py_DECREF(start); @@ -3791,13 +3786,13 @@ if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; } STACK_SHRINK(((oparg == 3) ? 1 : 0)); STACK_SHRINK(1); - POKE(1, slice); + stack_pointer[-1] = slice; DISPATCH(); } TARGET(FORMAT_VALUE) { - PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? PEEK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)) : NULL; - PyObject *value = PEEK(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); + PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL; + PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))]; PyObject *result; /* Handles f-string value formatting. */ PyObject *(*conv_fn)(PyObject *); @@ -3845,25 +3840,25 @@ if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; } } STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 
1 : 0)); - POKE(1, result); + stack_pointer[-1] = result; DISPATCH(); } TARGET(COPY) { - PyObject *bottom = PEEK(1 + (oparg-1)); + PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; PyObject *top; assert(oparg > 0); top = Py_NewRef(bottom); STACK_GROW(1); - POKE(1, top); + stack_pointer[-1] = top; DISPATCH(); } TARGET(BINARY_OP) { PREDICTED(BINARY_OP); static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); - PyObject *rhs = PEEK(1); - PyObject *lhs = PEEK(2); + PyObject *rhs = stack_pointer[-1]; + PyObject *lhs = stack_pointer[-2]; PyObject *res; #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; @@ -3884,17 +3879,17 @@ Py_DECREF(rhs); if (res == NULL) goto pop_2_error; STACK_SHRINK(1); - POKE(1, res); - JUMPBY(1); + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(SWAP) { - PyObject *top = PEEK(1); - PyObject *bottom = PEEK(2 + (oparg-2)); + PyObject *top = stack_pointer[-1]; + PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; assert(oparg >= 2); - POKE(1, bottom); - POKE(2 + (oparg-2), top); + stack_pointer[-1] = bottom; + stack_pointer[-(2 + (oparg-2))] = top; DISPATCH(); } diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index c7f52b55a5eb99..b760172974c8a5 100644 --- a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -171,19 +171,17 @@ def declare(self, dst: StackEffect, src: StackEffect | None): def assign(self, dst: StackEffect, src: StackEffect): if src.name == UNUSED: return + if src.size: + # Don't write sized arrays -- it's up to the user code. + return cast = self.cast(dst, src) - if m := re.match(r"^PEEK\((.*)\)$", dst.name): - stmt = f"POKE({m.group(1)}, {cast}{src.name});" + if re.match(r"^REG\(oparg(\d+)\)$", dst.name): + self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") + else: + stmt = f"{dst.name} = {cast}{src.name};" if src.cond: stmt = f"if ({src.cond}) {{ {stmt} }}" self.emit(stmt) - elif m := re.match(r"^&PEEK\(.*\)$", dst.name): - # The user code is responsible for writing to the output array. - pass - elif m := re.match(r"^REG\(oparg(\d+)\)$", dst.name): - self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") - else: - self.emit(f"{dst.name} = {cast}{src.name};") def cast(self, dst: StackEffect, src: StackEffect) -> str: return f"({dst.type or 'PyObject *'})" if src.type != dst.type else "" @@ -292,11 +290,11 @@ def write(self, out: Formatter) -> None: list_effect_size([ieff for ieff in ieffects[: i + 1]]) ) if ieffect.size: - src = StackEffect(f"&PEEK({isize})", "PyObject **") + src = StackEffect(f"(stack_pointer - {maybe_parenthesize(isize)})", "PyObject **") elif ieffect.cond: - src = StackEffect(f"({ieffect.cond}) ? PEEK({isize}) : NULL", "") + src = StackEffect(f"({ieffect.cond}) ? 
stack_pointer[-{maybe_parenthesize(isize)}] : NULL", "") else: - src = StackEffect(f"PEEK({isize})", "") + src = StackEffect(f"stack_pointer[-{maybe_parenthesize(isize)}]", "") out.declare(ieffect, src) else: # Write input register variable declarations and initializations @@ -324,7 +322,7 @@ def write(self, out: Formatter) -> None: else: out.declare(oeffect, None) - # out.emit(f"JUMPBY(OPSIZE({self.inst.name}) - 1);") + # out.emit(f"next_instr += OPSIZE({self.inst.name}) - 1;") self.write_body(out, 0) @@ -349,9 +347,9 @@ def write(self, out: Formatter) -> None: list_effect_size([oeff for oeff in oeffects[: i + 1]]) ) if oeffect.size: - dst = StackEffect(f"&PEEK({osize})", "PyObject **") + dst = StackEffect(f"stack_pointer - {maybe_parenthesize(osize)}", "PyObject **") else: - dst = StackEffect(f"PEEK({osize})", "") + dst = StackEffect(f"stack_pointer[-{maybe_parenthesize(osize)}]", "") out.assign(dst, oeffect) else: # Write output register assignments @@ -361,7 +359,7 @@ def write(self, out: Formatter) -> None: # Write cache effect if self.cache_offset: - out.emit(f"JUMPBY({self.cache_offset});") + out.emit(f"next_instr += {self.cache_offset};") def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None: """Write the instruction body.""" @@ -1060,17 +1058,13 @@ def write_super(self, sup: SuperInstruction) -> None: with self.wrap_super_or_macro(sup): first = True for comp in sup.parts: - if first: - pass - # self.out.emit("JUMPBY(OPSIZE(opcode) - 1);") - else: - self.out.emit("NEXTOPARG();") - self.out.emit("JUMPBY(1);") - # self.out.emit("JUMPBY(OPSIZE(opcode));") + if not first: + self.out.emit("oparg = (next_instr++)->op.arg;") + # self.out.emit("next_instr += OPSIZE(opcode) - 1;") first = False comp.write_body(self.out, 0) if comp.instr.cache_offset: - self.out.emit(f"JUMPBY({comp.instr.cache_offset});") + self.out.emit(f"next_instr += {comp.instr.cache_offset};") def write_macro(self, mac: MacroInstruction) -> None: """Write code for a macro instruction.""" @@ -1087,7 +1081,7 @@ def write_macro(self, mac: MacroInstruction) -> None: cache_adjust += comp.instr.cache_offset if cache_adjust: - self.out.emit(f"JUMPBY({cache_adjust});") + self.out.emit(f"next_instr += {cache_adjust};") if ( last_instr @@ -1113,7 +1107,7 @@ def wrap_super_or_macro(self, up: SuperOrMacroInstruction): for i, var in reversed(list(enumerate(up.stack))): src = None if i < up.initial_sp: - src = StackEffect(f"PEEK({up.initial_sp - i})", "") + src = StackEffect(f"stack_pointer[-{up.initial_sp - i}]", "") self.out.declare(var, src) yield @@ -1122,7 +1116,7 @@ def wrap_super_or_macro(self, up: SuperOrMacroInstruction): self.out.stack_adjust(up.final_sp - up.initial_sp, [], []) for i, var in enumerate(reversed(up.stack[: up.final_sp]), 1): - dst = StackEffect(f"PEEK({i})", "") + dst = StackEffect(f"stack_pointer[-{i}]", "") self.out.assign(dst, var) self.out.emit(f"DISPATCH();") From f300a1fa4c121f7807cbda4fc8bb26240c69ea74 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Tue, 28 Feb 2023 13:14:40 -0700 Subject: [PATCH 17/40] gh-100227: Move the dtoa State to PyInterpreterState (gh-102331) https://github.com/python/cpython/issues/100227 --- Include/internal/pycore_dtoa.h | 11 ++++++----- Include/internal/pycore_interp.h | 2 ++ Include/internal/pycore_runtime.h | 2 -- Include/internal/pycore_runtime_init.h | 6 +++--- Python/dtoa.c | 15 +++++++++------ Python/pystate.c | 20 +++++++++++++++++++- 6 files changed, 39 insertions(+), 17 deletions(-) diff --git a/Include/internal/pycore_dtoa.h 
b/Include/internal/pycore_dtoa.h index 67189cf0ade665..fb524770efed7c 100644 --- a/Include/internal/pycore_dtoa.h +++ b/Include/internal/pycore_dtoa.h @@ -24,10 +24,11 @@ Bigint { #ifdef Py_USING_MEMORY_DEBUGGER -struct _dtoa_runtime_state { +struct _dtoa_state { int _not_used; }; -#define _dtoa_runtime_state_INIT {0} +#define _dtoa_interp_state_INIT(INTERP) \ + {0} #else // !Py_USING_MEMORY_DEBUGGER @@ -40,7 +41,7 @@ struct _dtoa_runtime_state { #define Bigint_PREALLOC_SIZE \ ((PRIVATE_MEM+sizeof(double)-1)/sizeof(double)) -struct _dtoa_runtime_state { +struct _dtoa_state { /* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */ // XXX This should be freed during runtime fini. struct Bigint *p5s; @@ -48,9 +49,9 @@ struct _dtoa_runtime_state { double preallocated[Bigint_PREALLOC_SIZE]; double *preallocated_next; }; -#define _dtoa_runtime_state_INIT(runtime) \ +#define _dtoa_state_INIT(INTERP) \ { \ - .preallocated_next = runtime.dtoa.preallocated, \ + .preallocated_next = (INTERP)->dtoa.preallocated, \ } #endif // !Py_USING_MEMORY_DEBUGGER diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 60de31b336f613..7ef9c40153e421 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -16,6 +16,7 @@ extern "C" { #include "pycore_code.h" // struct callable_cache #include "pycore_context.h" // struct _Py_context_state #include "pycore_dict_state.h" // struct _Py_dict_state +#include "pycore_dtoa.h" // struct _dtoa_state #include "pycore_exceptions.h" // struct _Py_exc_state #include "pycore_floatobject.h" // struct _Py_float_state #include "pycore_function.h" // FUNC_MAX_WATCHERS @@ -139,6 +140,7 @@ struct _is { struct _Py_unicode_state unicode; struct _Py_float_state float_state; struct _Py_long_state long_state; + struct _dtoa_state dtoa; /* Using a cache is very effective since typically only a single slice is created and then deleted again. 
*/ PySliceObject *slice_cache; diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 9ef270791576e3..2350eaab5976ca 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -11,7 +11,6 @@ extern "C" { #include "pycore_atomic.h" /* _Py_atomic_address */ #include "pycore_ceval_state.h" // struct _ceval_runtime_state #include "pycore_dict_state.h" // struct _Py_dict_runtime_state -#include "pycore_dtoa.h" // struct _dtoa_runtime_state #include "pycore_floatobject.h" // struct _Py_float_runtime_state #include "pycore_faulthandler.h" // struct _faulthandler_runtime_state #include "pycore_function.h" // struct _func_runtime_state @@ -141,7 +140,6 @@ typedef struct pyruntimestate { struct _ceval_runtime_state ceval; struct _gilstate_runtime_state gilstate; struct _getargs_runtime_state getargs; - struct _dtoa_runtime_state dtoa; struct _fileutils_state fileutils; struct _faulthandler_runtime_state faulthandler; struct _tracemalloc_runtime_state tracemalloc; diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index a8d5953ff98b0b..b54adf04761d4e 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -53,7 +53,6 @@ extern "C" { .gilstate = { \ .check_enabled = 1, \ }, \ - .dtoa = _dtoa_runtime_state_INIT(runtime), \ .fileutils = { \ .force_ascii = -1, \ }, \ @@ -94,10 +93,10 @@ extern "C" { }, \ }, \ }, \ - ._main_interpreter = _PyInterpreterState_INIT, \ + ._main_interpreter = _PyInterpreterState_INIT(runtime._main_interpreter), \ } -#define _PyInterpreterState_INIT \ +#define _PyInterpreterState_INIT(INTERP) \ { \ .id_refcount = -1, \ .imports = IMPORTS_INIT, \ @@ -113,6 +112,7 @@ extern "C" { { .threshold = 10, }, \ }, \ }, \ + .dtoa = _dtoa_state_INIT(&(INTERP)), \ .static_objects = { \ .singletons = { \ ._not_used = 1, \ diff --git a/Python/dtoa.c b/Python/dtoa.c index cff5f1b0658eae..6ea60ac9946e0f 100644 --- a/Python/dtoa.c +++ b/Python/dtoa.c @@ -119,7 +119,7 @@ #include "Python.h" #include "pycore_dtoa.h" // _PY_SHORT_FLOAT_REPR -#include "pycore_runtime.h" // _PyRuntime +#include "pycore_pystate.h" // _PyInterpreterState_GET() #include // exit() /* if _PY_SHORT_FLOAT_REPR == 0, then don't even try to compile @@ -339,9 +339,9 @@ typedef struct Bigint Bigint; Bfree to PyMem_Free. Investigate whether this has any significant performance on impact. 
*/ -#define freelist _PyRuntime.dtoa.freelist -#define private_mem _PyRuntime.dtoa.preallocated -#define pmem_next _PyRuntime.dtoa.preallocated_next +#define freelist interp->dtoa.freelist +#define private_mem interp->dtoa.preallocated +#define pmem_next interp->dtoa.preallocated_next /* Allocate space for a Bigint with up to 1<next; @@ -385,6 +386,7 @@ Bfree(Bigint *v) if (v->k > Bigint_Kmax) FREE((void*)v); else { + PyInterpreterState *interp = _PyInterpreterState_GET(); v->next = freelist[v->k]; freelist[v->k] = v; } @@ -692,7 +694,8 @@ pow5mult(Bigint *b, int k) if (!(k >>= 2)) return b; - p5 = _PyRuntime.dtoa.p5s; + PyInterpreterState *interp = _PyInterpreterState_GET(); + p5 = interp->dtoa.p5s; if (!p5) { /* first time */ p5 = i2b(625); @@ -700,7 +703,7 @@ pow5mult(Bigint *b, int k) Bfree(b); return NULL; } - _PyRuntime.dtoa.p5s = p5; + interp->dtoa.p5s = p5; p5->next = 0; } for(;;) { diff --git a/Python/pystate.c b/Python/pystate.c index 3c655bf3895850..28606e4f32f71c 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -3,7 +3,8 @@ #include "Python.h" #include "pycore_ceval.h" -#include "pycore_code.h" // stats +#include "pycore_code.h" // stats +#include "pycore_dtoa.h" // _dtoa_state_INIT() #include "pycore_frame.h" #include "pycore_initconfig.h" #include "pycore_object.h" // _PyType_InitCache() @@ -618,6 +619,18 @@ free_interpreter(PyInterpreterState *interp) e.g. by PyMem_RawCalloc() or memset(), or otherwise pre-initialized. The runtime state is not manipulated. Instead it is assumed that the interpreter is getting added to the runtime. + + Note that the main interpreter was statically initialized as part + of the runtime and most state is already set properly. That leaves + a small number of fields to initialize dynamically, as well as some + that are initialized lazily. + + For subinterpreters we memcpy() the main interpreter in + PyInterpreterState_New(), leaving it in the same mostly-initialized + state. The only difference is that the interpreter has some + self-referential state that is statically initializexd to the + main interpreter. We fix those fields here, in addition + to the other dynamically initialized fields. */ static void @@ -645,6 +658,11 @@ init_interpreter(PyInterpreterState *interp, PyConfig_InitPythonConfig(&interp->config); _PyType_InitCache(interp); + if (interp != &runtime->_main_interpreter) { + /* Fix the self-referential, statically initialized fields. 
*/ + interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp); + } + interp->_initialized = 1; } From 880437d4ec65ef35d505eeaff9dad5c6654dbc1a Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Tue, 28 Feb 2023 14:16:39 -0700 Subject: [PATCH 18/40] gh-100227: Move _str_replace_inf to PyInterpreterState (gh-102333) https://github.com/python/cpython/issues/100227 --- Include/internal/pycore_global_objects.h | 7 ++++--- Parser/asdl_c.py | 4 +--- Python/Python-ast.c | 4 +--- Python/ast_unparse.c | 19 ++++++++++++++----- 4 files changed, 20 insertions(+), 14 deletions(-) diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h index d0461fa7e82e8b..30c7c4e3bbd067 100644 --- a/Include/internal/pycore_global_objects.h +++ b/Include/internal/pycore_global_objects.h @@ -27,8 +27,6 @@ extern "C" { _PyRuntime.cached_objects.NAME struct _Py_cached_objects { - PyObject *str_replace_inf; - PyObject *interned_strings; }; @@ -67,11 +65,14 @@ struct _Py_static_objects { (interp)->cached_objects.NAME struct _Py_interp_cached_objects { - int _not_set; + /* AST */ + PyObject *str_replace_inf; + /* object.__reduce__ */ PyObject *objreduce; PyObject *type_slots_pname; pytype_slotdef *type_slots_ptrs[MAX_EQUIV]; + }; #define _Py_INTERP_STATIC_OBJECT(interp, NAME) \ diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index db0e597b7a5aa4..b44e303ac2594b 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1484,9 +1484,7 @@ def generate_ast_fini(module_state, f): for s in module_state: f.write(" Py_CLEAR(state->" + s + ');\n') f.write(textwrap.dedent(""" - if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { - Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); - } + Py_CLEAR(_Py_INTERP_CACHED_OBJECT(interp, str_replace_inf)); #if !defined(NDEBUG) state->initialized = -1; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index d113c47b95392e..6c878474afb192 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -263,9 +263,7 @@ void _PyAST_Fini(PyInterpreterState *interp) Py_CLEAR(state->vararg); Py_CLEAR(state->withitem_type); - if (_PyInterpreterState_Get() == _PyInterpreterState_Main()) { - Py_CLEAR(_Py_CACHED_OBJECT(str_replace_inf)); - } + Py_CLEAR(_Py_INTERP_CACHED_OBJECT(interp, str_replace_inf)); #if !defined(NDEBUG) state->initialized = -1; diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c index 79b2e2f15ba243..8aff045101cc72 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_ast.h" // expr_ty +#include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_runtime.h" // _Py_ID() #include // DBL_MAX_10_EXP #include @@ -13,7 +14,10 @@ _Py_DECLARE_STR(open_br, "{"); _Py_DECLARE_STR(dbl_open_br, "{{"); _Py_DECLARE_STR(close_br, "}"); _Py_DECLARE_STR(dbl_close_br, "}}"); -#define _str_replace_inf _Py_CACHED_OBJECT(str_replace_inf) + +/* We would statically initialize this if doing so were simple enough. */ +#define _str_replace_inf(interp) \ + _Py_INTERP_CACHED_OBJECT(interp, str_replace_inf) /* Forward declarations for recursion via helper functions. 
*/ static PyObject * @@ -78,10 +82,11 @@ append_repr(_PyUnicodeWriter *writer, PyObject *obj) if ((PyFloat_CheckExact(obj) && Py_IS_INFINITY(PyFloat_AS_DOUBLE(obj))) || PyComplex_CheckExact(obj)) { + PyInterpreterState *interp = _PyInterpreterState_GET(); PyObject *new_repr = PyUnicode_Replace( repr, &_Py_ID(inf), - _str_replace_inf, + _str_replace_inf(interp), -1 ); Py_DECREF(repr); @@ -916,9 +921,13 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) static int maybe_init_static_strings(void) { - if (!_str_replace_inf && - !(_str_replace_inf = PyUnicode_FromFormat("1e%d", 1 + DBL_MAX_10_EXP))) { - return -1; + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (_str_replace_inf(interp) == NULL) { + PyObject *tmp = PyUnicode_FromFormat("1e%d", 1 + DBL_MAX_10_EXP); + if (tmp == NULL) { + return -1; + } + _str_replace_inf(interp) = tmp; } return 0; } From 360ef843d8fc03aad38ed84f9f45026ab08ca4f4 Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Tue, 28 Feb 2023 16:34:06 -0500 Subject: [PATCH 19/40] gh-99108: Add missing md5/sha1 defines to Modules/Setup (#102308) --- Modules/Setup | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/Setup b/Modules/Setup index ff432e2929ec2d..1c6f2f7ea5182d 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -163,8 +163,8 @@ PYTHONPATH=$(COREPYTHONPATH) # hashing builtins #_blake2 _blake2/blake2module.c _blake2/blake2b_impl.c _blake2/blake2s_impl.c -#_md5 md5module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_MD5.c -#_sha1 sha1module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA1.c +#_md5 md5module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_MD5.c -D_BSD_SOURCE -D_DEFAULT_SOURCE +#_sha1 sha1module.c -I$(srcdir)/Modules/_hacl/include _hacl/Hacl_Hash_SHA1.c -D_BSD_SOURCE -D_DEFAULT_SOURCE #_sha2 sha2module.c -I$(srcdir)/Modules/_hacl/include Modules/_hacl/libHacl_Streaming_SHA2.a #_sha3 _sha3/sha3module.c From 938e36f824c5f834d6b77d47942ad81edd5491d0 Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Wed, 1 Mar 2023 01:31:21 +0100 Subject: [PATCH 20/40] gh-102336: Remove code specifically for handling Windows 7 (GH-102337) --- ...-02-28-21-17-03.gh-issue-102336.-wL3Tm.rst | 1 + Modules/_winapi.c | 31 +---- Modules/getpath.c | 5 +- Modules/mmapmodule.c | 16 +-- Modules/posixmodule.c | 112 +++++------------- Modules/socketmodule.c | 42 ++----- Python/fileutils.c | 8 +- 7 files changed, 56 insertions(+), 159 deletions(-) create mode 100644 Misc/NEWS.d/next/Core and Builtins/2023-02-28-21-17-03.gh-issue-102336.-wL3Tm.rst diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-02-28-21-17-03.gh-issue-102336.-wL3Tm.rst b/Misc/NEWS.d/next/Core and Builtins/2023-02-28-21-17-03.gh-issue-102336.-wL3Tm.rst new file mode 100644 index 00000000000000..0c3e4bd4b86094 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-02-28-21-17-03.gh-issue-102336.-wL3Tm.rst @@ -0,0 +1 @@ +Cleanup Windows 7 specific special handling. Patch by Max Bachmann. 
diff --git a/Modules/_winapi.c b/Modules/_winapi.c index f4d982b15d402a..8c4c725b9df256 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -63,23 +63,6 @@ #define T_HANDLE T_POINTER -/* Grab CancelIoEx dynamically from kernel32 */ -static int has_CancelIoEx = -1; -static BOOL (CALLBACK *Py_CancelIoEx)(HANDLE, LPOVERLAPPED); - -static int -check_CancelIoEx() -{ - if (has_CancelIoEx == -1) - { - HINSTANCE hKernel32 = GetModuleHandle("KERNEL32"); - * (FARPROC *) &Py_CancelIoEx = GetProcAddress(hKernel32, - "CancelIoEx"); - has_CancelIoEx = (Py_CancelIoEx != NULL); - } - return has_CancelIoEx; -} - typedef struct { PyTypeObject *overlapped_type; } WinApiState; @@ -134,8 +117,7 @@ overlapped_dealloc(OverlappedObject *self) PyObject_GC_UnTrack(self); if (self->pending) { - if (check_CancelIoEx() && - Py_CancelIoEx(self->handle, &self->overlapped) && + if (CancelIoEx(self->handle, &self->overlapped) && GetOverlappedResult(self->handle, &self->overlapped, &bytes, TRUE)) { /* The operation is no longer pending -- nothing to do. */ @@ -306,10 +288,7 @@ _winapi_Overlapped_cancel_impl(OverlappedObject *self) if (self->pending) { Py_BEGIN_ALLOW_THREADS - if (check_CancelIoEx()) - res = Py_CancelIoEx(self->handle, &self->overlapped); - else - res = CancelIo(self->handle); + CancelIoEx(self->handle, &self->overlapped); Py_END_ALLOW_THREADS } @@ -655,8 +634,10 @@ _winapi_CreateJunction_impl(PyObject *module, LPCWSTR src_path, cleanup: ret = GetLastError(); - CloseHandle(token); - CloseHandle(junction); + if (token != NULL) + CloseHandle(token); + if (junction != NULL) + CloseHandle(junction); PyMem_RawFree(rdb); if (ret != 0) diff --git a/Modules/getpath.c b/Modules/getpath.c index 13db010649fed8..c807a3c8e9a2b9 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -227,12 +227,11 @@ getpath_isxfile(PyObject *Py_UNUSED(self), PyObject *args) path = PyUnicode_AsWideCharString(pathobj, &cchPath); if (path) { #ifdef MS_WINDOWS - const wchar_t *ext; DWORD attr = GetFileAttributesW(path); r = (attr != INVALID_FILE_ATTRIBUTES) && !(attr & FILE_ATTRIBUTE_DIRECTORY) && - SUCCEEDED(PathCchFindExtension(path, cchPath + 1, &ext)) && - (CompareStringOrdinal(ext, -1, L".exe", -1, 1 /* ignore case */) == CSTR_EQUAL) + (cchPath >= 4) && + (CompareStringOrdinal(path + cchPath - 4, -1, L".exe", -1, 1 /* ignore case */) == CSTR_EQUAL) ? Py_True : Py_False; #else struct stat st; diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index a01e798265c5a5..6054c2853d7a78 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -502,7 +502,7 @@ mmap_resize_method(mmap_object *self, CloseHandle(self->map_handle); /* if the file mapping still exists, it cannot be resized. */ if (self->tagname) { - self->map_handle = OpenFileMapping(FILE_MAP_WRITE, FALSE, + self->map_handle = OpenFileMappingA(FILE_MAP_WRITE, FALSE, self->tagname); if (self->map_handle) { PyErr_SetFromWindowsErr(ERROR_USER_MAPPED_FILE); @@ -531,7 +531,7 @@ mmap_resize_method(mmap_object *self, /* create a new file mapping and map a new view */ /* FIXME: call CreateFileMappingW with wchar_t tagname */ - self->map_handle = CreateFileMapping( + self->map_handle = CreateFileMappingA( self->file_handle, NULL, PAGE_READWRITE, @@ -1514,12 +1514,12 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) off_lo = (DWORD)(offset & 0xFFFFFFFF); /* For files, it would be sufficient to pass 0 as size. For anonymous maps, we have to pass the size explicitly. 
*/ - m_obj->map_handle = CreateFileMapping(m_obj->file_handle, - NULL, - flProtect, - size_hi, - size_lo, - m_obj->tagname); + m_obj->map_handle = CreateFileMappingA(m_obj->file_handle, + NULL, + flProtect, + size_hi, + size_lo, + m_obj->tagname); if (m_obj->map_handle != NULL) { m_obj->data = (char *) MapViewOfFile(m_obj->map_handle, dwDesiredAccess, diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 6ea216b3bf5e79..937233a3bd0779 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -10,16 +10,6 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" -// Include before pycore internal headers. FSCTL_GET_REPARSE_POINT -// is not exported by if the WIN32_LEAN_AND_MEAN macro is defined, -// whereas pycore_condvar.h defines the WIN32_LEAN_AND_MEAN macro. -#ifdef MS_WINDOWS -# include -# include -# include // UNLEN -# include "osdefs.h" // SEP -# define HAVE_SYMLINK -#endif #ifdef __VXWORKS__ # include "pycore_bitutils.h" // _Py_popcount32() @@ -34,6 +24,15 @@ #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_signal.h" // Py_NSIG +#ifdef MS_WINDOWS +# include +# include +# include +# include // UNLEN +# include "osdefs.h" // SEP +# define HAVE_SYMLINK +#endif + #include "structmember.h" // PyMemberDef #ifndef MS_WINDOWS # include "posixmodule.h" @@ -1507,32 +1506,6 @@ _Py_Sigset_Converter(PyObject *obj, void *addr) } #endif /* HAVE_SIGSET_T */ -#ifdef MS_WINDOWS - -static int -win32_get_reparse_tag(HANDLE reparse_point_handle, ULONG *reparse_tag) -{ - char target_buffer[_Py_MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; - _Py_REPARSE_DATA_BUFFER *rdb = (_Py_REPARSE_DATA_BUFFER *)target_buffer; - DWORD n_bytes_returned; - - if (0 == DeviceIoControl( - reparse_point_handle, - FSCTL_GET_REPARSE_POINT, - NULL, 0, /* in buffer */ - target_buffer, sizeof(target_buffer), - &n_bytes_returned, - NULL)) /* we're not using OVERLAPPED_IO */ - return FALSE; - - if (reparse_tag) - *reparse_tag = rdb->ReparseTag; - - return TRUE; -} - -#endif /* MS_WINDOWS */ - /* Return a dictionary corresponding to the POSIX environment table */ #if defined(WITH_NEXT_FRAMEWORK) || (defined(__APPLE__) && defined(Py_ENABLE_SHARED)) /* On Darwin/MacOSX a shared library or framework has no access to @@ -8263,42 +8236,32 @@ os_setpgrp_impl(PyObject *module) #ifdef HAVE_GETPPID #ifdef MS_WINDOWS -#include +#include static PyObject* win32_getppid() { - HANDLE snapshot; + DWORD error; PyObject* result = NULL; - BOOL have_record; - PROCESSENTRY32 pe; - - DWORD mypid = GetCurrentProcessId(); /* This function never fails */ - - snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0); - if (snapshot == INVALID_HANDLE_VALUE) - return PyErr_SetFromWindowsErr(GetLastError()); + HANDLE process = GetCurrentProcess(); - pe.dwSize = sizeof(pe); - have_record = Process32First(snapshot, &pe); - while (have_record) { - if (mypid == pe.th32ProcessID) { - /* We could cache the ulong value in a static variable. */ - result = PyLong_FromUnsignedLong(pe.th32ParentProcessID); - break; - } - - have_record = Process32Next(snapshot, &pe); + HPSS snapshot = NULL; + error = PssCaptureSnapshot(process, PSS_CAPTURE_NONE, 0, &snapshot); + if (error != ERROR_SUCCESS) { + return PyErr_SetFromWindowsErr(error); } - /* If our loop exits and our pid was not found (result will be NULL) - * then GetLastError will return ERROR_NO_MORE_FILES. This is an - * error anyway, so let's raise it. 
*/ - if (!result) - result = PyErr_SetFromWindowsErr(GetLastError()); - - CloseHandle(snapshot); + PSS_PROCESS_INFORMATION info; + error = PssQuerySnapshot(snapshot, PSS_QUERY_PROCESS_INFORMATION, &info, + sizeof(info)); + if (error == ERROR_SUCCESS) { + result = PyLong_FromUnsignedLong(info.ParentProcessId); + } + else { + result = PyErr_SetFromWindowsErr(error); + } + PssFreeSnapshot(process, snapshot); return result; } #endif /*MS_WINDOWS*/ @@ -15067,9 +15030,6 @@ os_getrandom_impl(PyObject *module, Py_ssize_t size, int flags) * on win32 */ -typedef DLL_DIRECTORY_COOKIE (WINAPI *PAddDllDirectory)(PCWSTR newDirectory); -typedef BOOL (WINAPI *PRemoveDllDirectory)(DLL_DIRECTORY_COOKIE cookie); - /*[clinic input] os._add_dll_directory @@ -15089,8 +15049,6 @@ static PyObject * os__add_dll_directory_impl(PyObject *module, path_t *path) /*[clinic end generated code: output=80b025daebb5d683 input=1de3e6c13a5808c8]*/ { - HMODULE hKernel32; - PAddDllDirectory AddDllDirectory; DLL_DIRECTORY_COOKIE cookie = 0; DWORD err = 0; @@ -15098,14 +15056,8 @@ os__add_dll_directory_impl(PyObject *module, path_t *path) return NULL; } - /* For Windows 7, we have to load this. As this will be a fairly - infrequent operation, just do it each time. Kernel32 is always - loaded. */ Py_BEGIN_ALLOW_THREADS - if (!(hKernel32 = GetModuleHandleW(L"kernel32")) || - !(AddDllDirectory = (PAddDllDirectory)GetProcAddress( - hKernel32, "AddDllDirectory")) || - !(cookie = (*AddDllDirectory)(path->wide))) { + if (!(cookie = AddDllDirectory(path->wide))) { err = GetLastError(); } Py_END_ALLOW_THREADS @@ -15134,8 +15086,6 @@ static PyObject * os__remove_dll_directory_impl(PyObject *module, PyObject *cookie) /*[clinic end generated code: output=594350433ae535bc input=c1d16a7e7d9dc5dc]*/ { - HMODULE hKernel32; - PRemoveDllDirectory RemoveDllDirectory; DLL_DIRECTORY_COOKIE cookieValue; DWORD err = 0; @@ -15148,14 +15098,8 @@ os__remove_dll_directory_impl(PyObject *module, PyObject *cookie) cookieValue = (DLL_DIRECTORY_COOKIE)PyCapsule_GetPointer( cookie, "DLL directory cookie"); - /* For Windows 7, we have to load this. As this will be a fairly - infrequent operation, just do it each time. Kernel32 is always - loaded. */ Py_BEGIN_ALLOW_THREADS - if (!(hKernel32 = GetModuleHandleW(L"kernel32")) || - !(RemoveDllDirectory = (PRemoveDllDirectory)GetProcAddress( - hKernel32, "RemoveDllDirectory")) || - !(*RemoveDllDirectory)(cookieValue)) { + if (!RemoveDllDirectory(cookieValue)) { err = GetLastError(); } Py_END_ALLOW_THREADS diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 00608be38f61bb..43a0cc0f963f9d 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -606,11 +606,6 @@ select_error(void) # define SUPPRESS_DEPRECATED_CALL #endif -#ifdef MS_WINDOWS -/* Does WSASocket() support the WSA_FLAG_NO_HANDLE_INHERIT flag? */ -static int support_wsa_no_inherit = -1; -#endif - /* Convenience function to raise an error according to errno and return a NULL pointer from a function. 
*/ @@ -5342,6 +5337,13 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, set_error(); return -1; } + + if (!SetHandleInformation((HANDLE)fd, HANDLE_FLAG_INHERIT, 0)) { + closesocket(fd); + PyErr_SetFromWindowsErr(0); + return -1; + } + family = info.iAddressFamily; type = info.iSocketType; proto = info.iProtocol; @@ -5438,33 +5440,15 @@ sock_initobj_impl(PySocketSockObject *self, int family, int type, int proto, #endif Py_BEGIN_ALLOW_THREADS - if (support_wsa_no_inherit) { - fd = WSASocketW(family, type, proto, - NULL, 0, - WSA_FLAG_OVERLAPPED | WSA_FLAG_NO_HANDLE_INHERIT); - if (fd == INVALID_SOCKET) { - /* Windows 7 or Windows 2008 R2 without SP1 or the hotfix */ - support_wsa_no_inherit = 0; - fd = socket(family, type, proto); - } - } - else { - fd = socket(family, type, proto); - } + fd = WSASocketW(family, type, proto, + NULL, 0, + WSA_FLAG_OVERLAPPED | WSA_FLAG_NO_HANDLE_INHERIT); Py_END_ALLOW_THREADS if (fd == INVALID_SOCKET) { set_error(); return -1; } - - if (!support_wsa_no_inherit) { - if (!SetHandleInformation((HANDLE)fd, HANDLE_FLAG_INHERIT, 0)) { - closesocket(fd); - PyErr_SetFromWindowsErr(0); - return -1; - } - } #else /* UNIX */ Py_BEGIN_ALLOW_THREADS @@ -7340,12 +7324,6 @@ PyInit__socket(void) if (!os_init()) return NULL; -#ifdef MS_WINDOWS - if (support_wsa_no_inherit == -1) { - support_wsa_no_inherit = IsWindows7SP1OrGreater(); - } -#endif - Py_SET_TYPE(&sock_type, &PyType_Type); m = PyModule_Create(&socketmodule); if (m == NULL) diff --git a/Python/fileutils.c b/Python/fileutils.c index 897c2f9f4ea160..93bee9ee007cd4 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1362,17 +1362,11 @@ set_inheritable(int fd, int inheritable, int raise, int *atomic_flag_works) else flags = 0; - /* This check can be removed once support for Windows 7 ends. 
*/ -#define CONSOLE_PSEUDOHANDLE(handle) (((ULONG_PTR)(handle) & 0x3) == 0x3 && \ - GetFileType(handle) == FILE_TYPE_CHAR) - - if (!CONSOLE_PSEUDOHANDLE(handle) && - !SetHandleInformation(handle, HANDLE_FLAG_INHERIT, flags)) { + if (!SetHandleInformation(handle, HANDLE_FLAG_INHERIT, flags)) { if (raise) PyErr_SetFromWindowsErr(0); return -1; } -#undef CONSOLE_PSEUDOHANDLE return 0; #else From 7d1d66341838d7d1963c9ee7ffca2950d3a751fd Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Wed, 1 Mar 2023 09:48:15 +0900 Subject: [PATCH 21/40] Doc: Fix minor error in ePub (GH-100614) Fix issue reported https://mail.python.org/archives/list/docs@python.org/message/KE7OIAO53P4XRC4ZOWPDHA63ZQJCHEC3/ --- Doc/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/conf.py b/Doc/conf.py index b3da8fa9ec4497..29fb63cbcc8614 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -268,7 +268,7 @@ ogp_site_name = 'Python documentation' ogp_image = '_static/og-image.png' ogp_custom_meta_tags = [ - '', - '', - '', + '', + '', + '', ] From f91846ba390f4437bcd525dfe6ed4355a894e159 Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Wed, 1 Mar 2023 09:49:23 +0000 Subject: [PATCH 22/40] gh-102192: Replace PyErr_Fetch/Restore etc by more efficient alternatives in tkinter module (#102319) --- Modules/_tkinter.c | 58 +++++++++++++++++++--------------------------- 1 file changed, 24 insertions(+), 34 deletions(-) diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index d4a129058702a2..1608939766ffb6 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -321,8 +321,6 @@ static PyObject *Tkinter_TclError; static int quitMainLoop = 0; static int errorInCmd = 0; static PyObject *excInCmd; -static PyObject *valInCmd; -static PyObject *trbInCmd; #ifdef TKINTER_PROTECT_LOADTK static int tk_load_failed = 0; @@ -1222,7 +1220,7 @@ typedef struct Tkapp_CallEvent { PyObject *args; int flags; PyObject **res; - PyObject **exc_type, **exc_value, **exc_tb; + PyObject **exc; Tcl_Condition *done; } Tkapp_CallEvent; @@ -1339,7 +1337,7 @@ Tkapp_CallProc(Tkapp_CallEvent *e, int flags) ENTER_PYTHON objv = Tkapp_CallArgs(e->args, objStore, &objc); if (!objv) { - PyErr_Fetch(e->exc_type, e->exc_value, e->exc_tb); + *(e->exc) = PyErr_GetRaisedException(); *(e->res) = NULL; } LEAVE_PYTHON @@ -1354,7 +1352,7 @@ Tkapp_CallProc(Tkapp_CallEvent *e, int flags) *(e->res) = Tkapp_ObjectResult(e->self); } if (*(e->res) == NULL) { - PyErr_Fetch(e->exc_type, e->exc_value, e->exc_tb); + *(e->exc) = PyErr_GetRaisedException(); } LEAVE_PYTHON @@ -1401,7 +1399,7 @@ Tkapp_Call(PyObject *selfptr, PyObject *args) marshal the parameters to the interpreter thread. 
*/ Tkapp_CallEvent *ev; Tcl_Condition cond = NULL; - PyObject *exc_type, *exc_value, *exc_tb; + PyObject *exc; if (!WaitForMainloop(self)) return NULL; ev = (Tkapp_CallEvent*)attemptckalloc(sizeof(Tkapp_CallEvent)); @@ -1413,18 +1411,18 @@ Tkapp_Call(PyObject *selfptr, PyObject *args) ev->self = self; ev->args = args; ev->res = &res; - ev->exc_type = &exc_type; - ev->exc_value = &exc_value; - ev->exc_tb = &exc_tb; + ev->exc = &exc; ev->done = &cond; Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, &call_mutex); if (res == NULL) { - if (exc_type) - PyErr_Restore(exc_type, exc_value, exc_tb); - else - PyErr_SetObject(Tkinter_TclError, exc_value); + if (exc) { + PyErr_SetRaisedException(exc); + } + else { + PyErr_SetObject(Tkinter_TclError, exc); + } } Tcl_ConditionFinalize(&cond); } @@ -1578,8 +1576,7 @@ typedef struct VarEvent { int flags; EventFunc func; PyObject **res; - PyObject **exc_type; - PyObject **exc_val; + PyObject **exc; Tcl_Condition *cond; } VarEvent; @@ -1643,12 +1640,7 @@ var_perform(VarEvent *ev) { *(ev->res) = ev->func(ev->self, ev->args, ev->flags); if (!*(ev->res)) { - PyObject *exc, *val, *tb; - PyErr_Fetch(&exc, &val, &tb); - PyErr_NormalizeException(&exc, &val, &tb); - *(ev->exc_type) = exc; - *(ev->exc_val) = val; - Py_XDECREF(tb); + *(ev->exc) = PyErr_GetRaisedException();; } } @@ -1672,7 +1664,7 @@ var_invoke(EventFunc func, PyObject *selfptr, PyObject *args, int flags) TkappObject *self = (TkappObject*)selfptr; if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { VarEvent *ev; - PyObject *res, *exc_type, *exc_val; + PyObject *res, *exc; Tcl_Condition cond = NULL; /* The current thread is not the interpreter thread. Marshal @@ -1691,16 +1683,14 @@ var_invoke(EventFunc func, PyObject *selfptr, PyObject *args, int flags) ev->flags = flags; ev->func = func; ev->res = &res; - ev->exc_type = &exc_type; - ev->exc_val = &exc_val; + ev->exc = &exc; ev->cond = &cond; ev->ev.proc = (Tcl_EventProc*)var_proc; Tkapp_ThreadSend(self, (Tcl_Event*)ev, &cond, &var_mutex); Tcl_ConditionFinalize(&cond); if (!res) { - PyErr_SetObject(exc_type, exc_val); - Py_DECREF(exc_type); - Py_DECREF(exc_val); + PyErr_SetObject((PyObject*)Py_TYPE(exc), exc); + Py_DECREF(exc); return NULL; } return res; @@ -2188,7 +2178,7 @@ static int PythonCmd_Error(Tcl_Interp *interp) { errorInCmd = 1; - PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + excInCmd = PyErr_GetRaisedException(); LEAVE_PYTHON return TCL_ERROR; } @@ -2458,7 +2448,7 @@ FileHandler(ClientData clientData, int mask) res = PyObject_CallFunction(func, "Oi", file, mask); if (res == NULL) { errorInCmd = 1; - PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + excInCmd = PyErr_GetRaisedException(); } Py_XDECREF(res); LEAVE_PYTHON @@ -2628,7 +2618,7 @@ TimerHandler(ClientData clientData) if (res == NULL) { errorInCmd = 1; - PyErr_Fetch(&excInCmd, &valInCmd, &trbInCmd); + excInCmd = PyErr_GetRaisedException(); } else Py_DECREF(res); @@ -2725,8 +2715,8 @@ _tkinter_tkapp_mainloop_impl(TkappObject *self, int threshold) if (errorInCmd) { errorInCmd = 0; - PyErr_Restore(excInCmd, valInCmd, trbInCmd); - excInCmd = valInCmd = trbInCmd = NULL; + PyErr_SetRaisedException(excInCmd); + excInCmd = NULL; return NULL; } Py_RETURN_NONE; @@ -3187,8 +3177,8 @@ EventHook(void) #endif if (errorInCmd) { errorInCmd = 0; - PyErr_Restore(excInCmd, valInCmd, trbInCmd); - excInCmd = valInCmd = trbInCmd = NULL; + PyErr_SetRaisedException(excInCmd); + excInCmd = NULL; PyErr_Print(); } PyEval_SaveThread(); From d3d20743ee1ae7e0be17bacd278985cffa864816 Mon Sep 17 
00:00:00 2001 From: Max Bachmann Date: Wed, 1 Mar 2023 13:01:39 +0100 Subject: [PATCH 23/40] gh-102336: Ensure CancelIoEx result is not ignored (GH-102347) fix ignored return value --- Modules/_winapi.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 8c4c725b9df256..eefc2571ee8287 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -288,7 +288,7 @@ _winapi_Overlapped_cancel_impl(OverlappedObject *self) if (self->pending) { Py_BEGIN_ALLOW_THREADS - CancelIoEx(self->handle, &self->overlapped); + res = CancelIoEx(self->handle, &self->overlapped); Py_END_ALLOW_THREADS } From c1748ed59dc30ab99fe69c22bdbab54f93baa57c Mon Sep 17 00:00:00 2001 From: Max Bachmann Date: Wed, 1 Mar 2023 15:50:38 +0100 Subject: [PATCH 24/40] gh-102344: Reimplement winreg QueryValue / SetValue using QueryValueEx / SetValueEx (GH-102345) The newer APIs are more widely available than the old ones, and are called in a way to preserve functionality. --- ...-03-01-01-36-39.gh-issue-102344.Dgfux4.rst | 2 + PC/winreg.c | 182 ++++++++++++------ 2 files changed, 124 insertions(+), 60 deletions(-) create mode 100644 Misc/NEWS.d/next/Windows/2023-03-01-01-36-39.gh-issue-102344.Dgfux4.rst diff --git a/Misc/NEWS.d/next/Windows/2023-03-01-01-36-39.gh-issue-102344.Dgfux4.rst b/Misc/NEWS.d/next/Windows/2023-03-01-01-36-39.gh-issue-102344.Dgfux4.rst new file mode 100644 index 00000000000000..4804212be8182c --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-03-01-01-36-39.gh-issue-102344.Dgfux4.rst @@ -0,0 +1,2 @@ +Implement ``winreg.QueryValue`` using ``QueryValueEx`` and +``winreg.SetValue`` using ``SetValueEx``. Patch by Max Bachmann. diff --git a/PC/winreg.c b/PC/winreg.c index 63b37be526ab80..86efed09855b01 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -561,7 +561,7 @@ Py2Reg(PyObject *value, DWORD typ, BYTE **retDataBuf, DWORD *retDataSize) { Py_ssize_t i,j; switch (typ) { - case REG_DWORD: + case REG_DWORD: { if (value != Py_None && !PyLong_Check(value)) { return FALSE; @@ -585,7 +585,7 @@ Py2Reg(PyObject *value, DWORD typ, BYTE **retDataBuf, DWORD *retDataSize) *retDataSize = sizeof(DWORD); break; } - case REG_QWORD: + case REG_QWORD: { if (value != Py_None && !PyLong_Check(value)) { return FALSE; @@ -1488,53 +1488,77 @@ static PyObject * winreg_QueryValue_impl(PyObject *module, HKEY key, const Py_UNICODE *sub_key) /*[clinic end generated code: output=c655810ae50c63a9 input=41cafbbf423b21d6]*/ { - long rc; - PyObject *retStr; - wchar_t *retBuf; - DWORD bufSize = 0; - DWORD retSize = 0; - wchar_t *tmp; + LONG rc; + HKEY childKey = key; + WCHAR buf[256], *pbuf = buf; + DWORD size = sizeof(buf); + DWORD type; + Py_ssize_t length; + PyObject *result = NULL; if (PySys_Audit("winreg.QueryValue", "nuu", - (Py_ssize_t)key, sub_key, NULL) < 0) { + (Py_ssize_t)key, sub_key, NULL) < 0) + { return NULL; } - rc = RegQueryValueW(key, sub_key, NULL, &retSize); - if (rc == ERROR_MORE_DATA) - retSize = 256; - else if (rc != ERROR_SUCCESS) - return PyErr_SetFromWindowsErrWithFunction(rc, + + if (key == HKEY_PERFORMANCE_DATA) { + return PyErr_SetFromWindowsErrWithFunction(ERROR_INVALID_HANDLE, "RegQueryValue"); + } - bufSize = retSize; - retBuf = (wchar_t *) PyMem_Malloc(bufSize); - if (retBuf == NULL) - return PyErr_NoMemory(); + if (sub_key && sub_key[0]) { + Py_BEGIN_ALLOW_THREADS + rc = RegOpenKeyExW(key, sub_key, 0, KEY_QUERY_VALUE, &childKey); + Py_END_ALLOW_THREADS + if (rc != ERROR_SUCCESS) { + return PyErr_SetFromWindowsErrWithFunction(rc, "RegOpenKeyEx"); + } + } while 
(1) { - retSize = bufSize; - rc = RegQueryValueW(key, sub_key, retBuf, &retSize); - if (rc != ERROR_MORE_DATA) + Py_BEGIN_ALLOW_THREADS + rc = RegQueryValueExW(childKey, NULL, NULL, &type, (LPBYTE)pbuf, + &size); + Py_END_ALLOW_THREADS + if (rc != ERROR_MORE_DATA) { break; - - bufSize *= 2; - tmp = (wchar_t *) PyMem_Realloc(retBuf, bufSize); + } + void *tmp = PyMem_Realloc(pbuf != buf ? pbuf : NULL, size); if (tmp == NULL) { - PyMem_Free(retBuf); - return PyErr_NoMemory(); + PyErr_NoMemory(); + goto exit; } - retBuf = tmp; + pbuf = tmp; } - if (rc != ERROR_SUCCESS) { - PyMem_Free(retBuf); - return PyErr_SetFromWindowsErrWithFunction(rc, - "RegQueryValue"); + if (rc == ERROR_SUCCESS) { + if (type != REG_SZ) { + PyErr_SetFromWindowsErrWithFunction(ERROR_INVALID_DATA, + "RegQueryValue"); + goto exit; + } + length = wcsnlen(pbuf, size / sizeof(WCHAR)); + } + else if (rc == ERROR_FILE_NOT_FOUND) { + // Return an empty string if there's no default value. + length = 0; + } + else { + PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryValueEx"); + goto exit; } - retStr = PyUnicode_FromWideChar(retBuf, wcslen(retBuf)); - PyMem_Free(retBuf); - return retStr; + result = PyUnicode_FromWideChar(pbuf, length); + +exit: + if (pbuf != buf) { + PyMem_Free(pbuf); + } + if (childKey != key) { + RegCloseKey(childKey); + } + return result; } @@ -1687,38 +1711,69 @@ winreg_SetValue_impl(PyObject *module, HKEY key, const Py_UNICODE *sub_key, DWORD type, PyObject *value_obj) /*[clinic end generated code: output=d4773dc9c372311a input=bf088494ae2d24fd]*/ { - Py_ssize_t value_length; - long rc; + LONG rc; + HKEY childKey = key; + LPWSTR value; + Py_ssize_t size; + Py_ssize_t length; + PyObject *result = NULL; if (type != REG_SZ) { PyErr_SetString(PyExc_TypeError, "type must be winreg.REG_SZ"); return NULL; } - wchar_t *value = PyUnicode_AsWideCharString(value_obj, &value_length); + value = PyUnicode_AsWideCharString(value_obj, &length); if (value == NULL) { return NULL; } - if ((Py_ssize_t)(DWORD)value_length != value_length) { + + size = (length + 1) * sizeof(WCHAR); + if ((Py_ssize_t)(DWORD)size != size) { PyErr_SetString(PyExc_OverflowError, "value is too long"); - PyMem_Free(value); - return NULL; + goto exit; } if (PySys_Audit("winreg.SetValue", "nunu#", (Py_ssize_t)key, sub_key, (Py_ssize_t)type, - value, value_length) < 0) { - PyMem_Free(value); - return NULL; + value, length) < 0) + { + goto exit; + } + + if (key == HKEY_PERFORMANCE_DATA) { + PyErr_SetFromWindowsErrWithFunction(ERROR_INVALID_HANDLE, + "RegSetValue"); + goto exit; + } + + if (sub_key && sub_key[0]) { + Py_BEGIN_ALLOW_THREADS + rc = RegCreateKeyExW(key, sub_key, 0, NULL, 0, KEY_SET_VALUE, NULL, + &childKey, NULL); + Py_END_ALLOW_THREADS + if (rc != ERROR_SUCCESS) { + PyErr_SetFromWindowsErrWithFunction(rc, "RegCreateKeyEx"); + goto exit; + } } Py_BEGIN_ALLOW_THREADS - rc = RegSetValueW(key, sub_key, REG_SZ, value, (DWORD)(value_length + 1)); + rc = RegSetValueExW(childKey, NULL, 0, REG_SZ, (LPBYTE)value, (DWORD)size); Py_END_ALLOW_THREADS + if (rc == ERROR_SUCCESS) { + result = Py_NewRef(Py_None); + } + else { + PyErr_SetFromWindowsErrWithFunction(rc, "RegSetValueEx"); + } + +exit: PyMem_Free(value); - if (rc != ERROR_SUCCESS) - return PyErr_SetFromWindowsErrWithFunction(rc, "RegSetValue"); - Py_RETURN_NONE; + if (childKey != key) { + RegCloseKey(childKey); + } + return result; } /*[clinic input] @@ -1771,32 +1826,39 @@ winreg_SetValueEx_impl(PyObject *module, HKEY key, DWORD type, PyObject *value) /*[clinic end generated code: 
output=811b769a66ae11b7 input=900a9e3990bfb196]*/ { - BYTE *data; - DWORD len; - LONG rc; + BYTE *data = NULL; + DWORD size; + PyObject *result = NULL; - if (!Py2Reg(value, type, &data, &len)) + if (!Py2Reg(value, type, &data, &size)) { - if (!PyErr_Occurred()) + if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ValueError, "Could not convert the data to the specified type."); + } return NULL; } if (PySys_Audit("winreg.SetValue", "nunO", (Py_ssize_t)key, value_name, (Py_ssize_t)type, - value) < 0) { - PyMem_Free(data); - return NULL; + value) < 0) + { + goto exit; } + Py_BEGIN_ALLOW_THREADS - rc = RegSetValueExW(key, value_name, 0, type, data, len); + rc = RegSetValueExW(key, value_name, 0, type, data, size); Py_END_ALLOW_THREADS + if (rc == ERROR_SUCCESS) { + result = Py_NewRef(Py_None); + } + else { + PyErr_SetFromWindowsErrWithFunction(rc, "RegSetValueEx"); + } + +exit: PyMem_Free(data); - if (rc != ERROR_SUCCESS) - return PyErr_SetFromWindowsErrWithFunction(rc, - "RegSetValueEx"); - Py_RETURN_NONE; + return result; } /*[clinic input] From 2f62a5da949cd368a9498e6a03e700f4629fa97f Mon Sep 17 00:00:00 2001 From: Hyunkyun Moon Date: Wed, 1 Mar 2023 23:56:19 +0900 Subject: [PATCH 25/40] gh-95672 skip fcntl when pipesize is smaller than pagesize (gh-102163) --- Doc/library/test.rst | 7 +++++++ Lib/test/support/__init__.py | 14 ++++++++++++++ Lib/test/test_fcntl.py | 5 +++-- Lib/test/test_subprocess.py | 3 ++- 4 files changed, 26 insertions(+), 3 deletions(-) diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 8199a27d7d9c4e..3c759648e4e626 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -536,6 +536,13 @@ The :mod:`test.support` module defines the following functions: :func:`doctest.testmod`. +.. function:: get_pagesize() + + Get size of a page in bytes. + + .. versionadded:: 3.12 + + .. function:: setswitchinterval(interval) Set the :func:`sys.setswitchinterval` to the given *interval*. Defines diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 4a22ccdd4db403..c309fd7910e0e6 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -51,6 +51,8 @@ # sys "is_jython", "is_android", "is_emscripten", "is_wasi", "check_impl_detail", "unix_shell", "setswitchinterval", + # os + "get_pagesize", # network "open_urlresource", # processes @@ -1893,6 +1895,18 @@ def setswitchinterval(interval): return sys.setswitchinterval(interval) +def get_pagesize(): + """Get size of a page in bytes.""" + try: + page_size = os.sysconf('SC_PAGESIZE') + except (ValueError, AttributeError): + try: + page_size = os.sysconf('SC_PAGE_SIZE') + except (ValueError, AttributeError): + page_size = 4096 + return page_size + + @contextlib.contextmanager def disable_faulthandler(): import faulthandler diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py index 26de67d924272a..5da75615b41d79 100644 --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -6,7 +6,7 @@ import struct import sys import unittest -from test.support import verbose, cpython_only +from test.support import verbose, cpython_only, get_pagesize from test.support.import_helper import import_module from test.support.os_helper import TESTFN, unlink @@ -201,7 +201,8 @@ def test_fcntl_f_pipesize(self): # Get the default pipesize with F_GETPIPE_SZ pipesize_default = fcntl.fcntl(test_pipe_w, fcntl.F_GETPIPE_SZ) pipesize = pipesize_default // 2 # A new value to detect change. 
- if pipesize < 512: # the POSIX minimum + pagesize_default = get_pagesize() + if pipesize < pagesize_default: # the POSIX minimum raise unittest.SkipTest( 'default pipesize too small to perform test.') fcntl.fcntl(test_pipe_w, fcntl.F_SETPIPE_SZ, pipesize) diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 727b0e6dc578c2..3880125807f235 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -717,7 +717,8 @@ def test_pipesizes(self): os.close(test_pipe_r) os.close(test_pipe_w) pipesize = pipesize_default // 2 - if pipesize < 512: # the POSIX minimum + pagesize_default = support.get_pagesize() + if pipesize < pagesize_default: # the POSIX minimum raise unittest.SkipTest( 'default pipesize too small to perform test.') p = subprocess.Popen( From eaae563b6878aa050b4ad406b67728b6b066220e Mon Sep 17 00:00:00 2001 From: Stefan Pochmann <609905+pochmann@users.noreply.github.com> Date: Thu, 2 Mar 2023 04:16:23 +0100 Subject: [PATCH 26/40] gh-102088 Optimize iter_index itertools recipe (GH-102360) --- Doc/library/itertools.rst | 9 ++++++--- Lib/test/test_operator.py | 3 +++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 95da7166842ee6..d85a17effb04a2 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -886,9 +886,12 @@ which incur interpreter overhead. except AttributeError: # Slow path for general iterables it = islice(iterable, start, None) - for i, element in enumerate(it, start): - if element is value or element == value: - yield i + i = start - 1 + try: + while True: + yield (i := i + operator.indexOf(it, value) + 1) + except ValueError: + pass else: # Fast path for sequences i = start - 1 diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index b7e38c23349878..1db738d228b1b9 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -208,6 +208,9 @@ def test_indexOf(self): nan = float("nan") self.assertEqual(operator.indexOf([nan, nan, 21], nan), 0) self.assertEqual(operator.indexOf([{}, 1, {}, 2], {}), 0) + it = iter('leave the iterator at exactly the position after the match') + self.assertEqual(operator.indexOf(it, 'a'), 2) + self.assertEqual(next(it), 'v') def test_invert(self): operator = self.module From 60597439ef482a840f8ffc76eb6c27f3ba773d9c Mon Sep 17 00:00:00 2001 From: Hyunkyun Moon Date: Thu, 2 Mar 2023 20:10:08 +0900 Subject: [PATCH 27/40] gh-95672: Update memory_watchdog to use test.support.get_pagesize (gh-102365) --- Lib/test/memory_watchdog.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/Lib/test/memory_watchdog.py b/Lib/test/memory_watchdog.py index 88cca8d323a636..fee062ecc9b300 100644 --- a/Lib/test/memory_watchdog.py +++ b/Lib/test/memory_watchdog.py @@ -5,20 +5,13 @@ # If the process crashes, reading from the /proc entry will fail with ESRCH. 
-import os import sys import time +from test.support import get_pagesize -try: - page_size = os.sysconf('SC_PAGESIZE') -except (ValueError, AttributeError): - try: - page_size = os.sysconf('SC_PAGE_SIZE') - except (ValueError, AttributeError): - page_size = 4096 - while True: + page_size = get_pagesize() sys.stdin.seek(0) statm = sys.stdin.read() data = int(statm.split()[5]) From ed55c69ebd74178115cd8b080f7f8e7588cd5fda Mon Sep 17 00:00:00 2001 From: Dong-hee Na Date: Thu, 2 Mar 2023 20:32:05 +0900 Subject: [PATCH 28/40] gh-101101: Fix test_code_extra to reset value for refleak test (gh-102350) --- Modules/_testcapi/code.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Modules/_testcapi/code.c b/Modules/_testcapi/code.c index 588dc67971ea5a..84c668cd6b3b00 100644 --- a/Modules/_testcapi/code.c +++ b/Modules/_testcapi/code.c @@ -92,7 +92,11 @@ test_code_extra(PyObject* self, PyObject *Py_UNUSED(callable)) goto finally; } assert ((uintptr_t)extra == 77); - + // Revert to initial code extra value. + res = PyUnstable_Code_SetExtra(test_func_code, code_extra_index, NULL); + if (res < 0) { + goto finally; + } result = Py_NewRef(Py_None); finally: Py_XDECREF(test_module); From 73250000ac7d6a5e41917e8bcea7234444cbde78 Mon Sep 17 00:00:00 2001 From: Michael K Date: Thu, 2 Mar 2023 16:26:49 +0100 Subject: [PATCH 29/40] Fix typos in documentation and comments (GH-102374) Found some duplicate `to`s in the documentation and some code comments and fixed them. [Misc/NEWS.d/3.12.0a1.rst](https://github.com/python/cpython/blob/ed55c69ebd74178115cd8b080f7f8e7588cd5fda/Misc/NEWS.d/3.12.0a1.rst) also contains two duplicate `to`s, but I wasn't sure if it's ok to touch that file. Looks auto generated. I'm happy to amend the PR if requested. :) Automerge-Triggered-By: GH:AlexWaygood --- Doc/library/sqlite3.rst | 2 +- Doc/library/typing.rst | 2 +- Lib/zoneinfo/_zoneinfo.py | 2 +- Objects/typeobject.c | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 18d0a5e630f6a9..ff036ad56acba8 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -72,7 +72,7 @@ including `cursors`_ and `transactions`_. First, we need to create a new database and open a database connection to allow :mod:`!sqlite3` to work with it. -Call :func:`sqlite3.connect` to to create a connection to +Call :func:`sqlite3.connect` to create a connection to the database :file:`tutorial.db` in the current working directory, implicitly creating it if it does not exist: diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 3395e4bfb95c44..80a969e6335abe 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1345,7 +1345,7 @@ These are not used in annotations. They are building blocks for creating generic x: Ts # Not valid x: tuple[Ts] # Not valid - x: tuple[*Ts] # The correct way to to do it + x: tuple[*Ts] # The correct way to do it Type variable tuples can be used in the same contexts as normal type variables. 
For example, in class definitions, arguments, and return types:: diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py index de68380792f17c..eede15b8271058 100644 --- a/Lib/zoneinfo/_zoneinfo.py +++ b/Lib/zoneinfo/_zoneinfo.py @@ -302,7 +302,7 @@ def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts): # difference between utcoffset() and the "standard" offset, but # the "base offset" and "DST offset" are not encoded in the file; # we can infer what they are from the isdst flag, but it is not - # sufficient to to just look at the last standard offset, because + # sufficient to just look at the last standard offset, because # occasionally countries will shift both DST offset and base offset. typecnt = len(isdsts) diff --git a/Objects/typeobject.c b/Objects/typeobject.c index b3f1429debc58b..54f1974c6ae3a7 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -3822,11 +3822,11 @@ PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, res->ht_qualname = Py_NewRef(ht_name); res->ht_name = ht_name; - ht_name = NULL; // Give our reference to to the type + ht_name = NULL; // Give our reference to the type type->tp_name = _ht_tpname; res->_ht_tpname = _ht_tpname; - _ht_tpname = NULL; // Give ownership to to the type + _ht_tpname = NULL; // Give ownership to the type /* Copy the sizes */ From 71db5dbcd714b2e1297c43538188dd69715feb9a Mon Sep 17 00:00:00 2001 From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com> Date: Thu, 2 Mar 2023 18:38:22 +0000 Subject: [PATCH 30/40] gh-102371: move _Py_Mangle from compile.c to symtable.c (#102372) --- Include/internal/pycore_compile.h | 6 --- Include/internal/pycore_symtable.h | 7 +++ Objects/typeobject.c | 2 +- Python/compile.c | 70 +----------------------------- Python/symtable.c | 67 +++++++++++++++++++++++++++- 5 files changed, 76 insertions(+), 76 deletions(-) diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 967fe92a5bc2b2..511f0689c93822 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -19,12 +19,6 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( int optimize, struct _arena *arena); -int _PyFuture_FromAST( - struct _mod * mod, - PyObject *filename, - PyFutureFeatures* futures); - -extern PyObject* _Py_Mangle(PyObject *p, PyObject *name); typedef struct { int optimize; diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index 8532646ce7d95c..512c4c931f73e4 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -90,6 +90,8 @@ PyAPI_FUNC(PySTEntryObject *) PySymtable_Lookup(struct symtable *, void *); extern void _PySymtable_Free(struct symtable *); +extern PyObject* _Py_Mangle(PyObject *p, PyObject *name); + /* Flags for def-use information */ #define DEF_GLOBAL 1 /* global stmt */ @@ -128,6 +130,11 @@ extern struct symtable* _Py_SymtableStringObjectFlags( int start, PyCompilerFlags *flags); +int _PyFuture_FromAST( + struct _mod * mod, + PyObject *filename, + PyFutureFeatures* futures); + #ifdef __cplusplus } #endif diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 54f1974c6ae3a7..981930f58417d8 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -3,7 +3,7 @@ #include "Python.h" #include "pycore_call.h" #include "pycore_code.h" // CO_FAST_FREE -#include "pycore_compile.h" // _Py_Mangle() +#include "pycore_symtable.h" // _Py_Mangle() #include "pycore_dict.h" // _PyDict_KeysSize() #include "pycore_initconfig.h" // _PyStatus_OK() #include 
"pycore_moduleobject.h" // _PyModule_GetDef() diff --git a/Python/compile.c b/Python/compile.c index b14c637210ff55..2e60a8157533ad 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -29,12 +29,12 @@ #include "Python.h" #include "pycore_ast.h" // _PyAST_GetDocString() #include "pycore_code.h" // _PyCode_New() -#include "pycore_compile.h" // _PyFuture_FromAST() +#include "pycore_compile.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_opcode.h" // _PyOpcode_Caches #include "pycore_pymem.h" // _PyMem_IsPtrFreed() -#include "pycore_symtable.h" // PySTEntryObject +#include "pycore_symtable.h" // PySTEntryObject, _PyFuture_FromAST() #include "opcode_metadata.h" // _PyOpcode_opcode_metadata, _PyOpcode_num_popped/pushed @@ -569,72 +569,6 @@ static PyCodeObject *assemble(struct compiler *, int addNone); #define CAPSULE_NAME "compile.c compiler unit" -PyObject * -_Py_Mangle(PyObject *privateobj, PyObject *ident) -{ - /* Name mangling: __private becomes _classname__private. - This is independent from how the name is used. */ - PyObject *result; - size_t nlen, plen, ipriv; - Py_UCS4 maxchar; - if (privateobj == NULL || !PyUnicode_Check(privateobj) || - PyUnicode_READ_CHAR(ident, 0) != '_' || - PyUnicode_READ_CHAR(ident, 1) != '_') { - return Py_NewRef(ident); - } - nlen = PyUnicode_GET_LENGTH(ident); - plen = PyUnicode_GET_LENGTH(privateobj); - /* Don't mangle __id__ or names with dots. - - The only time a name with a dot can occur is when - we are compiling an import statement that has a - package name. - - TODO(jhylton): Decide whether we want to support - mangling of the module name, e.g. __M.X. - */ - if ((PyUnicode_READ_CHAR(ident, nlen-1) == '_' && - PyUnicode_READ_CHAR(ident, nlen-2) == '_') || - PyUnicode_FindChar(ident, '.', 0, nlen, 1) != -1) { - return Py_NewRef(ident); /* Don't mangle __whatever__ */ - } - /* Strip leading underscores from class name */ - ipriv = 0; - while (PyUnicode_READ_CHAR(privateobj, ipriv) == '_') - ipriv++; - if (ipriv == plen) { - return Py_NewRef(ident); /* Don't mangle if class is just underscores */ - } - plen -= ipriv; - - if (plen + nlen >= PY_SSIZE_T_MAX - 1) { - PyErr_SetString(PyExc_OverflowError, - "private identifier too large to be mangled"); - return NULL; - } - - maxchar = PyUnicode_MAX_CHAR_VALUE(ident); - if (PyUnicode_MAX_CHAR_VALUE(privateobj) > maxchar) - maxchar = PyUnicode_MAX_CHAR_VALUE(privateobj); - - result = PyUnicode_New(1 + nlen + plen, maxchar); - if (!result) { - return NULL; - } - /* ident = "_" + priv[ipriv:] + ident # i.e. 
1+plen+nlen bytes */ - PyUnicode_WRITE(PyUnicode_KIND(result), PyUnicode_DATA(result), 0, '_'); - if (PyUnicode_CopyCharacters(result, 1, privateobj, ipriv, plen) < 0) { - Py_DECREF(result); - return NULL; - } - if (PyUnicode_CopyCharacters(result, plen+1, ident, 0, nlen) < 0) { - Py_DECREF(result); - return NULL; - } - assert(_PyUnicode_CheckConsistency(result, 1)); - return result; -} - static int compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename, diff --git a/Python/symtable.c b/Python/symtable.c index 89a2bc437dfa9b..df7473943f3fc1 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -1,6 +1,5 @@ #include "Python.h" #include "pycore_ast.h" // identifier, stmt_ty -#include "pycore_compile.h" // _Py_Mangle(), _PyFuture_FromAST() #include "pycore_parser.h" // _PyParser_ASTFromString() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_symtable.h" // PySTEntryObject @@ -2152,3 +2151,69 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, _PyArena_Free(arena); return st; } + +PyObject * +_Py_Mangle(PyObject *privateobj, PyObject *ident) +{ + /* Name mangling: __private becomes _classname__private. + This is independent from how the name is used. */ + if (privateobj == NULL || !PyUnicode_Check(privateobj) || + PyUnicode_READ_CHAR(ident, 0) != '_' || + PyUnicode_READ_CHAR(ident, 1) != '_') { + return Py_NewRef(ident); + } + size_t nlen = PyUnicode_GET_LENGTH(ident); + size_t plen = PyUnicode_GET_LENGTH(privateobj); + /* Don't mangle __id__ or names with dots. + + The only time a name with a dot can occur is when + we are compiling an import statement that has a + package name. + + TODO(jhylton): Decide whether we want to support + mangling of the module name, e.g. __M.X. + */ + if ((PyUnicode_READ_CHAR(ident, nlen-1) == '_' && + PyUnicode_READ_CHAR(ident, nlen-2) == '_') || + PyUnicode_FindChar(ident, '.', 0, nlen, 1) != -1) { + return Py_NewRef(ident); /* Don't mangle __whatever__ */ + } + /* Strip leading underscores from class name */ + size_t ipriv = 0; + while (PyUnicode_READ_CHAR(privateobj, ipriv) == '_') { + ipriv++; + } + if (ipriv == plen) { + return Py_NewRef(ident); /* Don't mangle if class is just underscores */ + } + plen -= ipriv; + + if (plen + nlen >= PY_SSIZE_T_MAX - 1) { + PyErr_SetString(PyExc_OverflowError, + "private identifier too large to be mangled"); + return NULL; + } + + Py_UCS4 maxchar = PyUnicode_MAX_CHAR_VALUE(ident); + if (PyUnicode_MAX_CHAR_VALUE(privateobj) > maxchar) { + maxchar = PyUnicode_MAX_CHAR_VALUE(privateobj); + } + + PyObject *result = PyUnicode_New(1 + nlen + plen, maxchar); + if (!result) { + return NULL; + } + /* ident = "_" + priv[ipriv:] + ident # i.e. 
1+plen+nlen bytes */ + PyUnicode_WRITE(PyUnicode_KIND(result), PyUnicode_DATA(result), 0, '_'); + if (PyUnicode_CopyCharacters(result, 1, privateobj, ipriv, plen) < 0) { + Py_DECREF(result); + return NULL; + } + if (PyUnicode_CopyCharacters(result, plen+1, ident, 0, nlen) < 0) { + Py_DECREF(result); + return NULL; + } + assert(_PyUnicode_CheckConsistency(result, 1)); + return result; +} + From 12011dd8bafa6867f2b4a8a9e8e54cb0fbf006e4 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Fri, 3 Mar 2023 06:59:05 +0300 Subject: [PATCH 31/40] gh-102324: Improve tests of `typing.override` (#102325) Fixes #101564 --- Lib/test/test_typing.py | 99 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 95 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index d61dc6e2fbd70b..96496ec9279e3f 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -4171,38 +4171,129 @@ class OverrideDecoratorTests(BaseTestCase): def test_override(self): class Base: def normal_method(self): ... + @classmethod + def class_method_good_order(cls): ... + @classmethod + def class_method_bad_order(cls): ... @staticmethod def static_method_good_order(): ... @staticmethod def static_method_bad_order(): ... - @staticmethod - def decorator_with_slots(): ... class Derived(Base): @override def normal_method(self): return 42 + @classmethod + @override + def class_method_good_order(cls): + return 42 + @override + @classmethod + def class_method_bad_order(cls): + return 42 + @staticmethod @override def static_method_good_order(): return 42 - @override @staticmethod def static_method_bad_order(): return 42 - self.assertIsSubclass(Derived, Base) instance = Derived() self.assertEqual(instance.normal_method(), 42) + self.assertIs(True, Derived.normal_method.__override__) self.assertIs(True, instance.normal_method.__override__) + + self.assertEqual(Derived.class_method_good_order(), 42) + self.assertIs(True, Derived.class_method_good_order.__override__) + self.assertEqual(Derived.class_method_bad_order(), 42) + self.assertIs(False, hasattr(Derived.class_method_bad_order, "__override__")) + self.assertEqual(Derived.static_method_good_order(), 42) self.assertIs(True, Derived.static_method_good_order.__override__) self.assertEqual(Derived.static_method_bad_order(), 42) self.assertIs(False, hasattr(Derived.static_method_bad_order, "__override__")) + # Base object is not changed: + self.assertIs(False, hasattr(Base.normal_method, "__override__")) + self.assertIs(False, hasattr(Base.class_method_good_order, "__override__")) + self.assertIs(False, hasattr(Base.class_method_bad_order, "__override__")) + self.assertIs(False, hasattr(Base.static_method_good_order, "__override__")) + self.assertIs(False, hasattr(Base.static_method_bad_order, "__override__")) + + def test_property(self): + class Base: + @property + def correct(self) -> int: + return 1 + @property + def wrong(self) -> int: + return 1 + + class Child(Base): + @property + @override + def correct(self) -> int: + return 2 + @override + @property + def wrong(self) -> int: + return 2 + + instance = Child() + self.assertEqual(instance.correct, 2) + self.assertTrue(Child.correct.fget.__override__) + self.assertEqual(instance.wrong, 2) + self.assertFalse(hasattr(Child.wrong, "__override__")) + self.assertFalse(hasattr(Child.wrong.fset, "__override__")) + + def test_silent_failure(self): + class CustomProp: + __slots__ = ('fget',) + def __init__(self, fget): + self.fget = fget + def __get__(self, obj, objtype=None): + return 
self.fget(obj) + + class WithOverride: + @override # must not fail on object with `__slots__` + @CustomProp + def some(self): + return 1 + + self.assertEqual(WithOverride.some, 1) + self.assertFalse(hasattr(WithOverride.some, "__override__")) + + def test_multiple_decorators(self): + import functools + + def with_wraps(f): # similar to `lru_cache` definition + @functools.wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs) + return wrapper + + class WithOverride: + @override + @with_wraps + def on_top(self, a: int) -> int: + return a + 1 + @with_wraps + @override + def on_bottom(self, a: int) -> int: + return a + 2 + + instance = WithOverride() + self.assertEqual(instance.on_top(1), 2) + self.assertTrue(instance.on_top.__override__) + self.assertEqual(instance.on_bottom(1), 3) + self.assertTrue(instance.on_bottom.__override__) + class CastTests(BaseTestCase): From 4e7c0cbf59595714848cf9827f6e5b40c3985924 Mon Sep 17 00:00:00 2001 From: Owain Davies <116417456+OTheDev@users.noreply.github.com> Date: Fri, 3 Mar 2023 17:51:32 +0700 Subject: [PATCH 32/40] gh-101754: Document that Windows converts keys in `os.environ` to uppercase (GH-101840) --- Doc/library/os.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 85924d0e48366b..23ce98785bedfc 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -201,6 +201,11 @@ process and user. ``'surrogateescape'`` error handler. Use :data:`environb` if you would like to use a different encoding. + On Windows, the keys are converted to uppercase. This also applies when + getting, setting, or deleting an item. For example, + ``environ['monty'] = 'python'`` maps the key ``'MONTY'`` to the value + ``'python'``. + .. note:: Calling :func:`putenv` directly does not change :data:`os.environ`, so it's better From 7b9132057d8f176cb9c40e8324f5122a3132ee58 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Fri, 3 Mar 2023 20:16:50 +0300 Subject: [PATCH 33/40] gh-102383: [docs] Arguments of `PyObject_CopyData` are `PyObject *` (#102390) --- Doc/c-api/buffer.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst index a04062fb2a68f1..91d1edd9b2ec46 100644 --- a/Doc/c-api/buffer.rst +++ b/Doc/c-api/buffer.rst @@ -499,7 +499,7 @@ Buffer-related functions This function fails if *len* != *src->len*. -.. c:function:: int PyObject_CopyData(Py_buffer *dest, Py_buffer *src) +.. c:function:: int PyObject_CopyData(PyObject *dest, PyObject *src) Copy data from *src* to *dest* buffer. Can convert between C-style and or Fortran-style buffers. 
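A condensed sketch of the decorator-ordering point exercised by the `typing.override` tests added for gh-102324 earlier in this series; the class and method names below are illustrative only. `@override` has to be the innermost decorator, underneath `@classmethod`, `@staticmethod` or `@property`, so that the `__override__` flag is set on the underlying function where it can be read back:

    from typing import override  # Python 3.12+

    class Base:
        @classmethod
        def make(cls): ...

    class Derived(Base):
        @classmethod
        @override      # innermost: the flag is set on the plain function
        def make(cls):
            return 42

    assert Derived.make() == 42
    assert Derived.make.__override__ is True

Reversing the two decorators still runs, but no `__override__` attribute is recorded, which is what the "bad order" and "silent failure" cases in the new tests assert.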
From cb944d0be869dfb1189265467ec8a986176cc104 Mon Sep 17 00:00:00 2001 From: Wagner Alberto Date: Fri, 3 Mar 2023 14:25:31 -0300 Subject: [PATCH 34/40] Add import of `unittest.mock.Mock` in documentation (#102346) --- Doc/library/unittest.mock.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index d6d8e5e9557d5c..6c4d801f69f5a9 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -72,6 +72,7 @@ available, and then make assertions about how they have been used: :attr:`side_effect` allows you to perform side effects, including raising an exception when a mock is called: + >>> from unittest.mock import Mock >>> mock = Mock(side_effect=KeyError('foo')) >>> mock() Traceback (most recent call last): From 8de59c1bb9fdcea69ff6e6357972ef1b75b71721 Mon Sep 17 00:00:00 2001 From: Jacob Bower <1978924+jbower-fb@users.noreply.github.com> Date: Fri, 3 Mar 2023 20:59:21 -0800 Subject: [PATCH 35/40] gh-102021 : Allow multiple input files for interpreter loop generator (#102022) The input files no longer use `-i`. --- Makefile.pre.in | 4 +- Python/generated_cases.c.h | 3 +- Python/opcode_metadata.h | 5 +- Tools/cases_generator/generate_cases.py | 115 ++++++++++++++++++------ Tools/cases_generator/lexer.py | 2 +- Tools/cases_generator/parser.py | 17 ++-- 6 files changed, 105 insertions(+), 41 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in index b12a1bc060af90..1a1853bf3d7871 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1485,9 +1485,9 @@ regen-cases: PYTHONPATH=$(srcdir)/Tools/cases_generator \ $(PYTHON_FOR_REGEN) \ $(srcdir)/Tools/cases_generator/generate_cases.py \ - -i $(srcdir)/Python/bytecodes.c \ -o $(srcdir)/Python/generated_cases.c.h.new \ - -m $(srcdir)/Python/opcode_metadata.h.new + -m $(srcdir)/Python/opcode_metadata.h.new \ + $(srcdir)/Python/bytecodes.c $(UPDATE_FILE) $(srcdir)/Python/generated_cases.c.h $(srcdir)/Python/generated_cases.c.h.new $(UPDATE_FILE) $(srcdir)/Python/opcode_metadata.h $(srcdir)/Python/opcode_metadata.h.new diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index f59f7c17451c17..82e18505b0d430 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -1,5 +1,6 @@ // This file is generated by Tools/cases_generator/generate_cases.py -// from Python/bytecodes.c +// from: +// Python/bytecodes.c // Do not edit! TARGET(NOP) { diff --git a/Python/opcode_metadata.h b/Python/opcode_metadata.h index f27906a3e349eb..67cb0088c3b789 100644 --- a/Python/opcode_metadata.h +++ b/Python/opcode_metadata.h @@ -1,5 +1,6 @@ -// This file is generated by Tools/cases_generator/generate_cases.py --metadata -// from Python/bytecodes.c +// This file is generated by Tools/cases_generator/generate_cases.py +// from: +// Python/bytecodes.c // Do not edit! 
#ifndef NEED_OPCODE_TABLES diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index b760172974c8a5..25cf75e4c1c490 100644 --- a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -37,15 +37,15 @@ description="Generate the code for the interpreter switch.", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) -arg_parser.add_argument( - "-i", "--input", type=str, help="Instruction definitions", default=DEFAULT_INPUT -) arg_parser.add_argument( "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT ) arg_parser.add_argument( "-m", "--metadata", type=str, help="Generated metadata", default=DEFAULT_METADATA_OUTPUT ) +arg_parser.add_argument( + "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" +) def effect_size(effect: StackEffect) -> tuple[int, str]: @@ -485,6 +485,11 @@ class MacroInstruction(SuperOrMacroInstruction): parts: list[Component | parser.CacheEffect] +@dataclasses.dataclass +class OverriddenInstructionPlaceHolder: + name: str + + AnyInstruction = Instruction | SuperInstruction | MacroInstruction INSTR_FMT_PREFIX = "INSTR_FMT_" @@ -492,32 +497,33 @@ class MacroInstruction(SuperOrMacroInstruction): class Analyzer: """Parse input, analyze it, and write to output.""" - filename: str + input_filenames: list[str] output_filename: str metadata_filename: str - src: str errors: int = 0 - def __init__(self, filename: str, output_filename: str, metadata_filename: str): + def __init__(self, input_filenames: list[str], output_filename: str, metadata_filename: str): """Read the input file.""" - self.filename = filename + self.input_filenames = input_filenames self.output_filename = output_filename self.metadata_filename = metadata_filename - with open(filename) as f: - self.src = f.read() def error(self, msg: str, node: parser.Node) -> None: lineno = 0 + filename = "" if context := node.context: + filename = context.owner.filename # Use line number of first non-comment in the node for token in context.owner.tokens[context.begin : context.end]: lineno = token.line if token.kind != "COMMENT": break - print(f"{self.filename}:{lineno}: {msg}", file=sys.stderr) + print(f"{filename}:{lineno}: {msg}", file=sys.stderr) self.errors += 1 - everything: list[parser.InstDef | parser.Super | parser.Macro] + everything: list[ + parser.InstDef | parser.Super | parser.Macro | OverriddenInstructionPlaceHolder + ] instrs: dict[str, Instruction] # Includes ops supers: dict[str, parser.Super] super_instrs: dict[str, SuperInstruction] @@ -531,7 +537,31 @@ def parse(self) -> None: We only want the parser to see the stuff between the begin and end markers. 
""" - psr = parser.Parser(self.src, filename=self.filename) + + self.everything = [] + self.instrs = {} + self.supers = {} + self.macros = {} + self.families = {} + + instrs_idx: dict[str, int] = dict() + + for filename in self.input_filenames: + self.parse_file(filename, instrs_idx) + + files = " + ".join(self.input_filenames) + print( + f"Read {len(self.instrs)} instructions/ops, " + f"{len(self.supers)} supers, {len(self.macros)} macros, " + f"and {len(self.families)} families from {files}", + file=sys.stderr, + ) + + def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None: + with open(filename) as file: + src = file.read() + + psr = parser.Parser(src, filename=filename) # Skip until begin marker while tkn := psr.next(raw=True): @@ -551,16 +581,27 @@ def parse(self) -> None: # Parse from start psr.setpos(start) - self.everything = [] - self.instrs = {} - self.supers = {} - self.macros = {} - self.families = {} thing: parser.InstDef | parser.Super | parser.Macro | parser.Family | None + thing_first_token = psr.peek() while thing := psr.definition(): match thing: case parser.InstDef(name=name): + if name in self.instrs: + if not thing.override: + raise psr.make_syntax_error( + f"Duplicate definition of '{name}' @ {thing.context} " + f"previous definition @ {self.instrs[name].inst.context}", + thing_first_token, + ) + self.everything[instrs_idx[name]] = OverriddenInstructionPlaceHolder(name=name) + if name not in self.instrs and thing.override: + raise psr.make_syntax_error( + f"Definition of '{name}' @ {thing.context} is supposed to be " + "an override but no previous definition exists.", + thing_first_token, + ) self.instrs[name] = Instruction(thing) + instrs_idx[name] = len(self.everything) self.everything.append(thing) case parser.Super(name): self.supers[name] = thing @@ -573,14 +614,7 @@ def parse(self) -> None: case _: typing.assert_never(thing) if not psr.eof(): - raise psr.make_syntax_error("Extra stuff at the end") - - print( - f"Read {len(self.instrs)} instructions/ops, " - f"{len(self.supers)} supers, {len(self.macros)} macros, " - f"and {len(self.families)} families from {self.filename}", - file=sys.stderr, - ) + raise psr.make_syntax_error(f"Extra stuff at the end of {filename}") def analyze(self) -> None: """Analyze the inputs. 
@@ -879,6 +913,8 @@ def write_stack_effect_functions(self) -> None: popped_data: list[tuple[AnyInstruction, str]] = [] pushed_data: list[tuple[AnyInstruction, str]] = [] for thing in self.everything: + if isinstance(thing, OverriddenInstructionPlaceHolder): + continue instr, popped, pushed = self.get_stack_effect_info(thing) if instr is not None: popped_data.append((instr, popped)) @@ -907,6 +943,13 @@ def write_function( write_function("pushed", pushed_data) self.out.emit("") + def from_source_files(self) -> str: + paths = "\n// ".join( + os.path.relpath(filename, ROOT).replace(os.path.sep, posixpath.sep) + for filename in self.input_filenames + ) + return f"// from:\n// {paths}\n" + def write_metadata(self) -> None: """Write instruction metadata to output file.""" @@ -914,6 +957,8 @@ def write_metadata(self) -> None: all_formats: set[str] = set() for thing in self.everything: match thing: + case OverriddenInstructionPlaceHolder(): + continue case parser.InstDef(): format = self.instrs[thing.name].instr_fmt case parser.Super(): @@ -928,8 +973,8 @@ def write_metadata(self) -> None: with open(self.metadata_filename, "w") as f: # Write provenance header - f.write(f"// This file is generated by {THIS} --metadata\n") - f.write(f"// from {os.path.relpath(self.filename, ROOT).replace(os.path.sep, posixpath.sep)}\n") + f.write(f"// This file is generated by {THIS}\n") + f.write(self.from_source_files()) f.write(f"// Do not edit!\n") # Create formatter; the rest of the code uses this @@ -959,6 +1004,8 @@ def write_metadata(self) -> None: # Write metadata for each instruction for thing in self.everything: match thing: + case OverriddenInstructionPlaceHolder(): + continue case parser.InstDef(): if thing.kind != "op": self.write_metadata_for_inst(self.instrs[thing.name]) @@ -1008,7 +1055,7 @@ def write_instructions(self) -> None: with open(self.output_filename, "w") as f: # Write provenance header f.write(f"// This file is generated by {THIS}\n") - f.write(f"// from {os.path.relpath(self.filename, ROOT).replace(os.path.sep, posixpath.sep)}\n") + f.write(self.from_source_files()) f.write(f"// Do not edit!\n") # Create formatter; the rest of the code uses this @@ -1020,6 +1067,8 @@ def write_instructions(self) -> None: n_macros = 0 for thing in self.everything: match thing: + case OverriddenInstructionPlaceHolder(): + self.write_overridden_instr_place_holder(thing) case parser.InstDef(): if thing.kind != "op": n_instrs += 1 @@ -1039,9 +1088,17 @@ def write_instructions(self) -> None: file=sys.stderr, ) + def write_overridden_instr_place_holder(self, + place_holder: OverriddenInstructionPlaceHolder) -> None: + self.out.emit("") + self.out.emit( + f"// TARGET({place_holder.name}) overridden by later definition") + def write_instr(self, instr: Instruction) -> None: name = instr.name self.out.emit("") + if instr.inst.override: + self.out.emit("// Override") with self.out.block(f"TARGET({name})"): if instr.predicted: self.out.emit(f"PREDICTED({name});") @@ -1190,6 +1247,8 @@ def variable_used(node: parser.Node, name: str) -> bool: def main(): """Parse command line, parse input, analyze, write output.""" args = arg_parser.parse_args() # Prints message and sys.exit(2) on error + if len(args.input) == 0: + args.input.append(DEFAULT_INPUT) a = Analyzer(args.input, args.output, args.metadata) # Raises OSError if input unreadable a.parse() # Raises SyntaxError on failure a.analyze() # Prints messages and sets a.errors on failure diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py index 
39b6a212a67b1c..1c70d1c4089e4e 100644 --- a/Tools/cases_generator/lexer.py +++ b/Tools/cases_generator/lexer.py @@ -119,7 +119,7 @@ def choice(*opts): kwds = ( 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN', - 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', + 'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', 'OVERRIDE', 'REGISTER', 'OFFSETOF', 'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', 'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID', diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py index c7c8d8af6b7318..7bf45a350bc84b 100644 --- a/Tools/cases_generator/parser.py +++ b/Tools/cases_generator/parser.py @@ -33,7 +33,7 @@ class Context(NamedTuple): owner: PLexer def __repr__(self): - return f"<{self.begin}-{self.end}>" + return f"<{self.owner.filename}: {self.begin}-{self.end}>" @dataclass @@ -99,6 +99,7 @@ class OpName(Node): @dataclass class InstHeader(Node): + override: bool register: bool kind: Literal["inst", "op", "legacy"] # Legacy means no (inputs -- outputs) name: str @@ -108,6 +109,7 @@ class InstHeader(Node): @dataclass class InstDef(Node): + override: bool register: bool kind: Literal["inst", "op", "legacy"] name: str @@ -152,17 +154,18 @@ def inst_def(self) -> InstDef | None: if hdr := self.inst_header(): if block := self.block(): return InstDef( - hdr.register, hdr.kind, hdr.name, hdr.inputs, hdr.outputs, block + hdr.override, hdr.register, hdr.kind, hdr.name, hdr.inputs, hdr.outputs, block ) raise self.make_syntax_error("Expected block") return None @contextual def inst_header(self) -> InstHeader | None: - # inst(NAME) - # | [register] inst(NAME, (inputs -- outputs)) - # | [register] op(NAME, (inputs -- outputs)) + # [override] inst(NAME) + # | [override] [register] inst(NAME, (inputs -- outputs)) + # | [override] [register] op(NAME, (inputs -- outputs)) # TODO: Make INST a keyword in the lexer. + override = bool(self.expect(lx.OVERRIDE)) register = bool(self.expect(lx.REGISTER)) if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("inst", "op"): if self.expect(lx.LPAREN) and (tkn := self.expect(lx.IDENTIFIER)): @@ -171,10 +174,10 @@ def inst_header(self) -> InstHeader | None: inp, outp = self.io_effect() if self.expect(lx.RPAREN): if (tkn := self.peek()) and tkn.kind == lx.LBRACE: - return InstHeader(register, kind, name, inp, outp) + return InstHeader(override, register, kind, name, inp, outp) elif self.expect(lx.RPAREN) and kind == "inst": # No legacy stack effect if kind is "op". - return InstHeader(register, "legacy", name, [], []) + return InstHeader(override, register, "legacy", name, [], []) return None def io_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]: From b022250e67449e0bc49a3c982fe9e6a2d6a7b71a Mon Sep 17 00:00:00 2001 From: Mark Dickinson Date: Sat, 4 Mar 2023 12:20:14 +0000 Subject: [PATCH 36/40] Remove unused internal macros (#102415) Since #101826 was merged, the internal macro `_Py_InIntegralTypeRange` is unused, as are its supporting macros `_Py_IntegralTypeMax` and `_Py_IntegralTypeMin`. This PR removes them. Note that `_Py_InIntegralTypeRange` doesn't actually work as advertised - it's not a safe way to avoid undefined behaviour in an integer to double conversion. 
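The note above that `_Py_InIntegralTypeRange` "doesn't actually work as advertised" deserves a one-line illustration (sketched in Python, where a float is a C double): the upper bound itself is not representable, so the range check also admits the first out-of-range value.

    # INT64_MAX == 2**63 - 1 rounds *up* to 2.0**63 when converted to double,
    # so a check like `v <= (double)INT64_MAX` also lets v == 2.0**63 through,
    # and converting that back to a 64-bit integer is undefined behaviour in C.
    print(float(2**63 - 1) == 2.0**63)   # True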
--- Include/internal/pycore_pymath.h | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/Include/internal/pycore_pymath.h b/Include/internal/pycore_pymath.h index 5f3afe4df6865c..7a4e1c1eb714f7 100644 --- a/Include/internal/pycore_pymath.h +++ b/Include/internal/pycore_pymath.h @@ -56,21 +56,6 @@ static inline void _Py_ADJUST_ERANGE2(double x, double y) } } -// Return the maximum value of integral type *type*. -#define _Py_IntegralTypeMax(type) \ - (_Py_IS_TYPE_SIGNED(type) ? (((((type)1 << (sizeof(type)*CHAR_BIT - 2)) - 1) << 1) + 1) : ~(type)0) - -// Return the minimum value of integral type *type*. -#define _Py_IntegralTypeMin(type) \ - (_Py_IS_TYPE_SIGNED(type) ? -_Py_IntegralTypeMax(type) - 1 : 0) - -// Check whether *v* is in the range of integral type *type*. This is most -// useful if *v* is floating-point, since demoting a floating-point *v* to an -// integral type that cannot represent *v*'s integral part is undefined -// behavior. -#define _Py_InIntegralTypeRange(type, v) \ - (_Py_IntegralTypeMin(type) <= v && v <= _Py_IntegralTypeMax(type)) - //--- HAVE_PY_SET_53BIT_PRECISION macro ------------------------------------ // From 705487c6557c3d8866622b4d32528bf7fc2e4204 Mon Sep 17 00:00:00 2001 From: Raj <51259329+workingpayload@users.noreply.github.com> Date: Sat, 4 Mar 2023 19:51:29 +0530 Subject: [PATCH 37/40] gh-101892: Fix `SystemError` when a callable iterator call exhausts the iterator (#101896) Co-authored-by: Oleg Iarygin --- Lib/test/test_iter.py | 25 +++++++++++++++++++ ...-02-14-09-08-48.gh-issue-101892.FMos8l.rst | 3 +++ Objects/iterobject.c | 4 +-- 3 files changed, 30 insertions(+), 2 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2023-02-14-09-08-48.gh-issue-101892.FMos8l.rst diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index 6ab9b7a7230309..30aedb0db3bb3d 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -348,6 +348,31 @@ def spam(state=[0]): return i self.check_iterator(iter(spam, 20), list(range(10)), pickle=False) + def test_iter_function_concealing_reentrant_exhaustion(self): + # gh-101892: Test two-argument iter() with a function that + # exhausts its associated iterator but forgets to either return + # a sentinel value or raise StopIteration. + HAS_MORE = 1 + NO_MORE = 2 + + def exhaust(iterator): + """Exhaust an iterator without raising StopIteration.""" + list(iterator) + + def spam(): + # Touching the iterator with exhaust() below will call + # spam() once again so protect against recursion. + if spam.is_recursive_call: + return NO_MORE + spam.is_recursive_call = True + exhaust(spam.iterator) + return HAS_MORE + + spam.is_recursive_call = False + spam.iterator = iter(spam, NO_MORE) + with self.assertRaises(StopIteration): + next(spam.iterator) + # Test exception propagation through function iterator def test_exception_function(self): def spam(state=[0]): diff --git a/Misc/NEWS.d/next/Library/2023-02-14-09-08-48.gh-issue-101892.FMos8l.rst b/Misc/NEWS.d/next/Library/2023-02-14-09-08-48.gh-issue-101892.FMos8l.rst new file mode 100644 index 00000000000000..d586779b3a8a36 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-14-09-08-48.gh-issue-101892.FMos8l.rst @@ -0,0 +1,3 @@ +Callable iterators no longer raise :class:`SystemError` when the +callable object exhausts the iterator but forgets to either return a +sentinel value or raise :class:`StopIteration`. 
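For readers skimming the NEWS entry above, a standalone sketch of the gh-101892 scenario, condensed from the new test (the `spam`/`NO_MORE` names come from that test; nothing else is assumed):

    NO_MORE = 2   # sentinel for the two-argument iter()

    def spam():
        if spam.is_recursive_call:
            return NO_MORE
        spam.is_recursive_call = True
        list(spam.iterator)   # re-enters and exhausts the iterator
        return 1              # ...then forgets to return the sentinel

    spam.is_recursive_call = False
    spam.iterator = iter(spam, NO_MORE)

    try:
        next(spam.iterator)
    except StopIteration:
        pass   # the iterator now reports plain exhaustion instead of SystemError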
diff --git a/Objects/iterobject.c b/Objects/iterobject.c index 149b701b68e91a..7cb17a6ca4ab56 100644 --- a/Objects/iterobject.c +++ b/Objects/iterobject.c @@ -219,7 +219,7 @@ calliter_iternext(calliterobject *it) } result = _PyObject_CallNoArgs(it->it_callable); - if (result != NULL) { + if (result != NULL && it->it_sentinel != NULL){ int ok; ok = PyObject_RichCompareBool(it->it_sentinel, result, Py_EQ); @@ -227,7 +227,6 @@ calliter_iternext(calliterobject *it) return result; /* Common case, fast path */ } - Py_DECREF(result); if (ok > 0) { Py_CLEAR(it->it_callable); Py_CLEAR(it->it_sentinel); @@ -238,6 +237,7 @@ calliter_iternext(calliterobject *it) Py_CLEAR(it->it_callable); Py_CLEAR(it->it_sentinel); } + Py_XDECREF(result); return NULL; } From c2bd55d26f8eb2850eb9f9026b5d7f0ed1420b65 Mon Sep 17 00:00:00 2001 From: Alexey Izbyshev Date: Sat, 4 Mar 2023 17:24:08 +0300 Subject: [PATCH 38/40] gh-102179: Fix `os.dup2` error reporting for negative fds (#102180) --- Lib/test/test_os.py | 20 +++++++++++++++++++ ...-02-23-15-06-01.gh-issue-102179.P6KQ4c.rst | 1 + Modules/posixmodule.c | 5 ----- 3 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 Misc/NEWS.d/next/Library/2023-02-23-15-06-01.gh-issue-102179.P6KQ4c.rst diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index deea207bfdadd9..792794ca109489 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -2221,6 +2221,26 @@ def test_closerange(self): def test_dup2(self): self.check(os.dup2, 20) + @unittest.skipUnless(hasattr(os, 'dup2'), 'test needs os.dup2()') + @unittest.skipIf( + support.is_emscripten, + "dup2() with negative fds is broken on Emscripten (see gh-102179)" + ) + def test_dup2_negative_fd(self): + valid_fd = os.open(__file__, os.O_RDONLY) + self.addCleanup(os.close, valid_fd) + fds = [ + valid_fd, + -1, + -2**31, + ] + for fd, fd2 in itertools.product(fds, repeat=2): + if fd != fd2: + with self.subTest(fd=fd, fd2=fd2): + with self.assertRaises(OSError) as ctx: + os.dup2(fd, fd2) + self.assertEqual(ctx.exception.errno, errno.EBADF) + @unittest.skipUnless(hasattr(os, 'fchmod'), 'test needs os.fchmod()') def test_fchmod(self): self.check(os.fchmod, 0) diff --git a/Misc/NEWS.d/next/Library/2023-02-23-15-06-01.gh-issue-102179.P6KQ4c.rst b/Misc/NEWS.d/next/Library/2023-02-23-15-06-01.gh-issue-102179.P6KQ4c.rst new file mode 100644 index 00000000000000..f77493e267ac7e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-23-15-06-01.gh-issue-102179.P6KQ4c.rst @@ -0,0 +1 @@ +Fix :func:`os.dup2` error message for negative fds. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 937233a3bd0779..7beb2cee64a05c 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -9795,11 +9795,6 @@ os_dup2_impl(PyObject *module, int fd, int fd2, int inheritable) static int dup3_works = -1; #endif - if (fd < 0 || fd2 < 0) { - posix_error(); - return -1; - } - /* dup2() can fail with EINTR if the target FD is already open, because it * then has to be closed. See os_close_impl() for why we don't handle EINTR * upon close(), and therefore below. 
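A quick illustration of the reporting change in gh-102179, based on the new test (the file opened for the valid descriptor is arbitrary, and the test skips this case on Emscripten):

    import errno
    import os

    fd = os.open(__file__, os.O_RDONLY)
    try:
        os.dup2(fd, -1)                        # negative target descriptor
    except OSError as exc:
        assert exc.errno == errno.EBADF        # now reported by dup2() itself
    finally:
        os.close(fd)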
From 90801e48fdd2a57c5c5a5e202ff76c3a7dc42a50 Mon Sep 17 00:00:00 2001 From: Gouvernathor <44340603+Gouvernathor@users.noreply.github.com> Date: Sat, 4 Mar 2023 16:08:57 +0100 Subject: [PATCH 39/40] gh-102302 Micro-optimize `inspect.Parameter.__hash__` (#102303) --- Lib/inspect.py | 2 +- .../next/Library/2023-03-04-14-46-47.gh-issue-102302.-b_s6Z.rst | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 Misc/NEWS.d/next/Library/2023-03-04-14-46-47.gh-issue-102302.-b_s6Z.rst diff --git a/Lib/inspect.py b/Lib/inspect.py index 8bb3a375735af6..166667c62cdccf 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -2805,7 +2805,7 @@ def __repr__(self): return '<{} "{}">'.format(self.__class__.__name__, self) def __hash__(self): - return hash((self.name, self.kind, self.annotation, self.default)) + return hash((self._name, self._kind, self._annotation, self._default)) def __eq__(self, other): if self is other: diff --git a/Misc/NEWS.d/next/Library/2023-03-04-14-46-47.gh-issue-102302.-b_s6Z.rst b/Misc/NEWS.d/next/Library/2023-03-04-14-46-47.gh-issue-102302.-b_s6Z.rst new file mode 100644 index 00000000000000..aaf4e62069ca24 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-03-04-14-46-47.gh-issue-102302.-b_s6Z.rst @@ -0,0 +1 @@ +Micro-optimise hashing of :class:`inspect.Parameter`, reducing the time it takes to hash an instance by around 40%. From 77a3196b7cc17d90a8aae5629aa71ff183b9266a Mon Sep 17 00:00:00 2001 From: Byeongmin Choi Date: Sun, 5 Mar 2023 01:01:54 +0900 Subject: [PATCH 40/40] gh-101863: Fix wrong comments in EUC-KR codec (gh-102417) --- Misc/ACKS | 1 + Modules/cjkcodecs/_codecs_kr.c | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS index 2da3d0ab29b81d..c591cd3bfe4b9e 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -322,6 +322,7 @@ Adal Chiriliuc Matt Chisholm Lita Cho Kit Yan Choi +Byeongmin Choi Sayan Chowdhury Yuan-Chao Chou Anders Chrigström diff --git a/Modules/cjkcodecs/_codecs_kr.c b/Modules/cjkcodecs/_codecs_kr.c index 6d6acb5c4be4b5..72641e495af0b0 100644 --- a/Modules/cjkcodecs/_codecs_kr.c +++ b/Modules/cjkcodecs/_codecs_kr.c @@ -60,7 +60,7 @@ ENCODER(euc_kr) } else { /* Mapping is found in CP949 extension, - but we encode it in KS X 1001:1998 Annex 3, + but we encode it in KS X 1001:1998, make-up sequence for EUC-KR. */ REQUIRE_OUTBUF(8); @@ -120,7 +120,7 @@ DECODER(euc_kr) if (c == EUCKR_JAMO_FIRSTBYTE && INBYTE2 == EUCKR_JAMO_FILLER) { - /* KS X 1001:1998 Annex 3 make-up sequence */ + /* KS X 1001:1998 make-up sequence */ DBCHAR cho, jung, jong; REQUIRE_INBUF(8);
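To make the "make-up sequence" wording in the corrected comments concrete, a rough sketch, assuming a syllable such as '똠' (U+B620) that exists in the CP949 extension but not in the base KS X 1001 table, so the euc_kr codec falls back to the 8-byte composed form handled above:

    encoded = '\ub620'.encode('euc-kr')   # '똠'
    print(len(encoded))                   # 8: the filler-led jamo make-up sequence
    print(encoded.decode('euc-kr'))       # round-trips back to '똠'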