diff --git a/.gitattributes b/.gitattributes index 22afffb05abb20..2a48df079e1aeb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -94,7 +94,6 @@ Programs/test_frozenmain.h generated Python/Python-ast.c generated Python/executor_cases.c.h generated Python/generated_cases.c.h generated -Python/abstract_interp_cases.c.h generated Python/opcode_targets.h generated Python/stdlib_module_names.h generated Tools/peg_generator/pegen/grammar_parser.py generated diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index db28c2a231ae04..8038206441ab9b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -157,6 +157,8 @@ Doc/c-api/stable.rst @encukou **/*dataclasses* @ericvsmith +**/*ensurepip* @pfmoore @pradyunsg + **/*idlelib* @terryjreedy **/*typing* @JelleZijlstra @AlexWaygood diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst index e0c72ff9249ee7..582bb18f752bd5 100644 --- a/Doc/library/collections.abc.rst +++ b/Doc/library/collections.abc.rst @@ -87,7 +87,7 @@ the required methods (unless those methods have been set to class E: def __iter__(self): ... - def __next__(next): ... + def __next__(self): ... .. doctest:: diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 686b37754368c1..3674b4bd97d39d 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1799,6 +1799,8 @@ Other constructor: Examples:: + .. doctest:: + >>> from datetime import time >>> time.fromisoformat('04:23:01') datetime.time(4, 23, 1) @@ -1808,7 +1810,7 @@ Other constructor: datetime.time(4, 23, 1) >>> time.fromisoformat('04:23:01.000384') datetime.time(4, 23, 1, 384) - >>> time.fromisoformat('04:23:01,000') + >>> time.fromisoformat('04:23:01,000384') datetime.time(4, 23, 1, 384) >>> time.fromisoformat('04:23:01+04:00') datetime.time(4, 23, 1, tzinfo=datetime.timezone(datetime.timedelta(seconds=14400))) diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst index 766847b971b645..cb95c61322582f 100644 --- a/Doc/library/dbm.rst +++ b/Doc/library/dbm.rst @@ -272,6 +272,13 @@ This module can be used with the "classic" ndbm interface or the GNU GDBM compatibility interface. On Unix, the :program:`configure` script will attempt to locate the appropriate header file to simplify building this module. +.. warning:: + + The ndbm library shipped as part of macOS has an undocumented limitation on the + size of values, which can result in corrupted database files + when storing values larger than this limit. Reading such corrupted files can + result in a hard crash (segmentation fault). + .. exception:: error Raised on :mod:`dbm.ndbm`-specific errors, such as I/O errors. :exc:`KeyError` is raised diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 5647021d6a9ba6..5823142cc75998 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -1214,9 +1214,10 @@ iterations of the loop. ``super(cls, self).method()``, ``super(cls, self).attr``). It pops three values from the stack (from top of stack down): - - ``self``: the first argument to the current method - - ``cls``: the class within which the current method was defined - - the global ``super`` + + * ``self``: the first argument to the current method + * ``cls``: the class within which the current method was defined + * the global ``super`` With respect to its argument, it works similarly to :opcode:`LOAD_ATTR`, except that ``namei`` is shifted left by 2 bits instead of 1. 
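As context for the ``LOAD_SUPER_ATTR`` description just above, here is a minimal sketch — assuming CPython 3.12 or later, where this opcode exists — showing it surface in a method's disassembly; the class names are illustrative only::

    import dis

    class A:
        def greet(self):
            return "A"

    class B(A):
        def greet(self):
            # Zero-argument super(): the compiler loads the global ``super``,
            # the enclosing class, and ``self``, then emits LOAD_SUPER_ATTR.
            return super().greet()

    dis.dis(B.greet)  # on 3.12+, the output includes a LOAD_SUPER_ATTR instruction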
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index c731b6fd333275..4682ec9c924757 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1800,6 +1800,13 @@ are always available. They are listed here in alphabetical order. the second argument is a type, ``issubclass(type2, type)`` must be true (this is useful for classmethods). + When called directly within an ordinary method of a class, both arguments may + be omitted ("zero-argument :func:`!super`"). In this case, *type* will be the + enclosing class, and *obj* will be the first argument of the immediately + enclosing function (typically ``self``). (This means that zero-argument + :func:`!super` will not work as expected within nested functions, including + generator expressions, which implicitly create nested functions.) + There are two typical use cases for *super*. In a class hierarchy with single inheritance, *super* can be used to refer to parent classes without naming them explicitly, thus making the code more maintainable. This use diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index 1df7d8d772a274..cc4a0da92da60a 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -171,16 +171,18 @@ group. Read `the setuptools docs `_ for more information on entry points, their definition, and usage. -*Compatibility Note* - -The "selectable" entry points were introduced in ``importlib_metadata`` -3.6 and Python 3.10. Prior to those changes, ``entry_points`` accepted -no parameters and always returned a dictionary of entry points, keyed -by group. With ``importlib_metadata`` 5.0 and Python 3.12, -``entry_points`` always returns an ``EntryPoints`` object. See -`backports.entry_points_selectable `_ -for compatibility options. - +.. versionchanged:: 3.12 + The "selectable" entry points were introduced in ``importlib_metadata`` + 3.6 and Python 3.10. Prior to those changes, ``entry_points`` accepted + no parameters and always returned a dictionary of entry points, keyed + by group. With ``importlib_metadata`` 5.0 and Python 3.12, + ``entry_points`` always returns an ``EntryPoints`` object. See + `backports.entry_points_selectable `_ + for compatibility options. + +.. versionchanged:: 3.13 + ``EntryPoint`` objects no longer present a tuple-like interface + (:meth:`~object.__getitem__`). .. _metadata: @@ -342,9 +344,17 @@ instance:: >>> dist.metadata['License'] # doctest: +SKIP 'MIT' +For editable packages, an origin property may present :pep:`610` +metadata:: + + >>> dist.origin.url + 'file:///path/to/wheel-0.32.3.editable-py3-none-any.whl' + The full set of available metadata is not described here. See the `Core metadata specifications `_ for additional details. +.. versionadded:: 3.13 + The ``.origin`` property was added. Distribution Discovery ====================== diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 1138cc1f249ee7..6b6e62a683ab18 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1001,11 +1001,14 @@ as internal buffering of data. .. audit-event:: os.chmod path,mode,dir_fd os.fchmod - .. availability:: Unix. + .. availability:: Unix, Windows. The function is limited on Emscripten and WASI, see :ref:`wasm-availability` for more information. + .. versionchanged:: 3.13 + Added support on Windows. + .. function:: fchown(fd, uid, gid) @@ -2077,7 +2080,8 @@ features: Accepts a :term:`path-like object`. .. versionchanged:: 3.13 - Added support for the *follow_symlinks* argument on Windows. 
+ Added support for a file descriptor and the *follow_symlinks* argument + on Windows. .. function:: chown(path, uid, gid, *, dir_fd=None, follow_symlinks=True) diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst index 88802d717d7383..95c54991887022 100644 --- a/Doc/library/shelve.rst +++ b/Doc/library/shelve.rst @@ -113,6 +113,9 @@ Restrictions differs across Unix versions and requires knowledge about the database implementation used. +* On macOS :mod:`dbm.ndbm` can silently corrupt the database file on updates, + which can cause hard crashes when trying to read from the database. + .. class:: Shelf(dict, protocol=None, writeback=False, keyencoding='utf-8') diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 0db233e2dde33c..e8709b516ae07a 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -25,7 +25,7 @@ probably additional platforms, as long as OpenSSL is installed on that platform. Some behavior may be platform dependent, since calls are made to the operating system socket APIs. The installed version of OpenSSL may also - cause variations in behavior. For example, TLSv1.3 with OpenSSL version + cause variations in behavior. For example, TLSv1.3 comes with OpenSSL version 1.1.1. .. warning:: diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index ba2845eb17ddcc..63bd62d1f6679b 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -2604,10 +2604,10 @@ Functions and decorators .. function:: reveal_type(obj, /) - Reveal the inferred static type of an expression. + Ask a static type checker to reveal the inferred type of an expression. When a static type checker encounters a call to this function, - it emits a diagnostic with the type of the argument. For example:: + it emits a diagnostic with the inferred type of the argument. For example:: x: int = 1 reveal_type(x) # Revealed type is "builtins.int" @@ -2615,22 +2615,21 @@ Functions and decorators This can be useful when you want to debug how your type checker handles a particular piece of code. - The function returns its argument unchanged, which allows using - it within an expression:: + At runtime, this function prints the runtime type of its argument to + :data:`sys.stderr` and returns the argument unchanged (allowing the call to + be used within an expression):: - x = reveal_type(1) # Revealed type is "builtins.int" + x = reveal_type(1) # prints "Runtime type is int" + print(x) # prints "1" + + Note that the runtime type may be different from (more or less specific + than) the type statically inferred by a type checker. Most type checkers support ``reveal_type()`` anywhere, even if the name is not imported from ``typing``. Importing the name from - ``typing`` allows your code to run without runtime errors and + ``typing``, however, allows your code to run without runtime errors and communicates intent more clearly. - At runtime, this function prints the runtime type of its argument to stderr - and returns it unchanged:: - - x = reveal_type(1) # prints "Runtime type is int" - print(x) # prints "1" - .. versionadded:: 3.11 .. decorator:: dataclass_transform(*, eq_default=True, order_default=False, \ diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 5e3757e1f5c6f6..b3af5c6298d02d 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -2808,10 +2808,10 @@ through the object's keys; for sequences, it should iterate through the values. .. method:: object.__getitem__(self, key) Called to implement evaluation of ``self[key]``. 
For :term:`sequence` types, - the accepted keys should be integers and slice objects. Note that the - special interpretation of negative indexes (if the class wishes to emulate a - :term:`sequence` type) is up to the :meth:`__getitem__` method. If *key* is - of an inappropriate type, :exc:`TypeError` may be raised; if of a value + the accepted keys should be integers. Optionally, they may support + :class:`slice` objects as well. Negative index support is also optional. + If *key* is + of an inappropriate type, :exc:`TypeError` may be raised; if *key* is a value outside the set of indexes for the sequence (after any special interpretation of negative values), :exc:`IndexError` should be raised. For :term:`mapping` types, if *key* is missing (not in the container), diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 2c869cbe11396b..b4cd106f5cac5f 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -289,6 +289,10 @@ os ``False`` on Windows. (Contributed by Serhiy Storchaka in :gh:`59616`) +* Add support of :func:`os.fchmod` and a file descriptor + in :func:`os.chmod` on Windows. + (Contributed by Serhiy Storchaka in :gh:`113191`) + * :func:`os.posix_spawn` now accepts ``env=None``, which makes the newly spawned process use the current process environment. (Contributed by Jakub Kulik in :gh:`113119`.) @@ -1001,6 +1005,10 @@ importlib for migration advice. (Contributed by Jason R. Coombs in :gh:`106532`.) +* Remove deprecated :meth:`~object.__getitem__` access for + :class:`!importlib.metadata.EntryPoint` objects. + (Contributed by Jason R. Coombs in :gh:`113175`.) + locale ------ diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 56172d231c44f4..ed7dd829d4b6f0 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -223,9 +223,11 @@ struct _ts { // layout, optimization, and WASI runtime. Wasmtime can handle about 700 // recursions, sometimes less. 500 is a more conservative limit. # define Py_C_RECURSION_LIMIT 500 +#elif defined(__s390x__) +# define Py_C_RECURSION_LIMIT 1200 #else // This value is duplicated in Lib/test/support/__init__.py -# define Py_C_RECURSION_LIMIT 1500 +# define Py_C_RECURSION_LIMIT 8000 #endif diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 36b6cd52d2b272..7d39e4bc03099c 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -1,13 +1,20 @@ -// This file is generated by Tools/cases_generator/generate_cases.py +// This file is generated by Tools/cases_generator/opcode_metadata_generator.py // from: // Python/bytecodes.c // Do not edit! 
+#ifndef Py_CORE_OPCODE_METADATA_H +#define Py_CORE_OPCODE_METADATA_H +#ifdef __cplusplus +extern "C" { +#endif + #ifndef Py_BUILD_CORE # error "this header requires Py_BUILD_CORE define" #endif #include // bool +#include "opcode_ids.h" #define IS_PSEUDO_INSTR(OP) ( \ @@ -26,10 +33,9 @@ 0) #include "pycore_uop_ids.h" - -extern int _PyOpcode_num_popped(int opcode, int oparg, bool jump); +extern int _PyOpcode_num_popped(int opcode, int oparg); #ifdef NEED_OPCODE_METADATA -int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { +int _PyOpcode_num_popped(int opcode, int oparg) { switch(opcode) { case BEFORE_ASYNC_WITH: return 1; @@ -68,7 +74,7 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case BINARY_SUBSCR_TUPLE_INT: return 2; case BUILD_CONST_KEY_MAP: - return oparg + 1; + return 1 + oparg; case BUILD_LIST: return oparg; case BUILD_MAP: @@ -76,7 +82,7 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case BUILD_SET: return oparg; case BUILD_SLICE: - return ((oparg == 3) ? 1 : 0) + 2; + return 2 + ((oparg == 3) ? 1 : 0); case BUILD_STRING: return oparg; case BUILD_TUPLE: @@ -84,51 +90,51 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case CACHE: return 0; case CALL: - return oparg + 2; + return 2 + oparg; case CALL_ALLOC_AND_ENTER_INIT: - return oparg + 2; + return 2 + oparg; case CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; + return 2 + oparg; case CALL_BUILTIN_CLASS: - return oparg + 2; + return 2 + oparg; case CALL_BUILTIN_FAST: - return oparg + 2; + return 2 + oparg; case CALL_BUILTIN_FAST_WITH_KEYWORDS: - return oparg + 2; + return 2 + oparg; case CALL_BUILTIN_O: - return oparg + 2; + return 2 + oparg; case CALL_FUNCTION_EX: - return ((oparg & 1) ? 1 : 0) + 3; + return 3 + (oparg & 1); case CALL_INTRINSIC_1: return 1; case CALL_INTRINSIC_2: return 2; case CALL_ISINSTANCE: - return oparg + 2; + return 2 + oparg; case CALL_KW: - return oparg + 3; + return 3 + oparg; case CALL_LEN: - return oparg + 2; + return 2 + oparg; case CALL_LIST_APPEND: - return oparg + 2; + return 2 + oparg; case CALL_METHOD_DESCRIPTOR_FAST: - return oparg + 2; + return 2 + oparg; case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: - return oparg + 2; + return 2 + oparg; case CALL_METHOD_DESCRIPTOR_NOARGS: - return oparg + 2; + return 2 + oparg; case CALL_METHOD_DESCRIPTOR_O: - return oparg + 2; + return 2 + oparg; case CALL_PY_EXACT_ARGS: - return oparg + 2; + return 2 + oparg; case CALL_PY_WITH_DEFAULTS: - return oparg + 2; + return 2 + oparg; case CALL_STR_1: - return oparg + 2; + return 2 + oparg; case CALL_TUPLE_1: - return oparg + 2; + return 2 + oparg; case CALL_TYPE_1: - return oparg + 2; + return 2 + oparg; case CHECK_EG_MATCH: return 2; case CHECK_EXC_MATCH: @@ -148,7 +154,7 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case CONVERT_VALUE: return 1; case COPY: - return (oparg-1) + 1; + return 1 + (oparg-1); case COPY_FREE_VARS: return 0; case DELETE_ATTR: @@ -164,9 +170,9 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case DELETE_SUBSCR: return 2; case DICT_MERGE: - return (oparg - 1) + 5; + return 5 + (oparg - 1); case DICT_UPDATE: - return (oparg - 1) + 2; + return 2 + (oparg - 1); case END_ASYNC_FOR: return 2; case END_FOR: @@ -249,20 +255,16 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case IS_OP: return 2; - case JUMP: - return 0; case JUMP_BACKWARD: return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; case JUMP_FORWARD: return 0; - case JUMP_NO_INTERRUPT: - return 0; case 
LIST_APPEND: - return (oparg-1) + 2; + return 2 + (oparg-1); case LIST_EXTEND: - return (oparg-1) + 2; + return 2 + (oparg-1); case LOAD_ASSERTION_ERROR: return 0; case LOAD_ATTR: @@ -293,8 +295,6 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case LOAD_BUILD_CLASS: return 0; - case LOAD_CLOSURE: - return 0; case LOAD_CONST: return 0; case LOAD_DEREF: @@ -319,8 +319,6 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 0; case LOAD_LOCALS: return 0; - case LOAD_METHOD: - return 1; case LOAD_NAME: return 0; case LOAD_SUPER_ATTR: @@ -329,18 +327,12 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 3; case LOAD_SUPER_ATTR_METHOD: return 3; - case LOAD_SUPER_METHOD: - return 3; - case LOAD_ZERO_SUPER_ATTR: - return 3; - case LOAD_ZERO_SUPER_METHOD: - return 3; case MAKE_CELL: return 0; case MAKE_FUNCTION: return 1; case MAP_ADD: - return (oparg - 1) + 3; + return 3 + (oparg - 1); case MATCH_CLASS: return 3; case MATCH_KEYS: @@ -351,8 +343,6 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case NOP: return 0; - case POP_BLOCK: - return 0; case POP_EXCEPT: return 1; case POP_JUMP_IF_FALSE: @@ -372,7 +362,7 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case RAISE_VARARGS: return oparg; case RERAISE: - return oparg + 1; + return 1 + oparg; case RESERVED: return 0; case RESUME: @@ -391,18 +381,12 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 2; case SETUP_ANNOTATIONS: return 0; - case SETUP_CLEANUP: - return 0; - case SETUP_FINALLY: - return 0; - case SETUP_WITH: - return 0; case SET_ADD: - return (oparg-1) + 2; + return 2 + (oparg-1); case SET_FUNCTION_ATTRIBUTE: return 2; case SET_UPDATE: - return (oparg-1) + 2; + return 2 + (oparg-1); case STORE_ATTR: return 2; case STORE_ATTR_INSTANCE_VALUE: @@ -417,8 +401,6 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case STORE_FAST_LOAD_FAST: return 1; - case STORE_FAST_MAYBE_NULL: - return 1; case STORE_FAST_STORE_FAST: return 2; case STORE_GLOBAL: @@ -434,7 +416,7 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case STORE_SUBSCR_LIST_INT: return 3; case SWAP: - return (oparg-2) + 2; + return 2 + (oparg-2); case TO_BOOL: return 1; case TO_BOOL_ALWAYS_TRUE: @@ -469,207 +451,16 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 4; case YIELD_VALUE: return 1; - case _BINARY_OP: - return 2; - case _BINARY_OP_ADD_FLOAT: - return 2; - case _BINARY_OP_ADD_INT: - return 2; - case _BINARY_OP_ADD_UNICODE: - return 2; - case _BINARY_OP_INPLACE_ADD_UNICODE: - return 2; - case _BINARY_OP_MULTIPLY_FLOAT: - return 2; - case _BINARY_OP_MULTIPLY_INT: - return 2; - case _BINARY_OP_SUBTRACT_FLOAT: - return 2; - case _BINARY_OP_SUBTRACT_INT: - return 2; - case _BINARY_SUBSCR: - return 2; - case _CALL: - return oparg + 2; - case _CHECK_ATTR_CLASS: - return 1; - case _CHECK_ATTR_METHOD_LAZY_DICT: - return 1; - case _CHECK_ATTR_MODULE: - return 1; - case _CHECK_ATTR_WITH_HINT: - return 1; - case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _CHECK_FUNCTION_EXACT_ARGS: - return oparg + 2; - case _CHECK_MANAGED_OBJECT_HAS_VALUES: - return 1; - case _CHECK_PEP_523: - return 0; - case _CHECK_STACK_SPACE: - return oparg + 2; - case _CHECK_VALIDITY: - return 0; - case _COMPARE_OP: - return 2; - case _EXIT_TRACE: - return 0; - case _FOR_ITER: - return 1; - case _FOR_ITER_TIER_TWO: - return 1; - case _GUARD_BOTH_FLOAT: - return 2; - case _GUARD_BOTH_INT: - return 2; - case 
_GUARD_BOTH_UNICODE: - return 2; - case _GUARD_BUILTINS_VERSION: - return 0; - case _GUARD_DORV_VALUES: - return 1; - case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: - return 1; - case _GUARD_GLOBALS_VERSION: - return 0; - case _GUARD_IS_FALSE_POP: - return 1; - case _GUARD_IS_NONE_POP: - return 1; - case _GUARD_IS_NOT_NONE_POP: - return 1; - case _GUARD_IS_TRUE_POP: - return 1; - case _GUARD_KEYS_VERSION: - return 1; - case _GUARD_NOT_EXHAUSTED_LIST: - return 1; - case _GUARD_NOT_EXHAUSTED_RANGE: - return 1; - case _GUARD_NOT_EXHAUSTED_TUPLE: - return 1; - case _GUARD_TYPE_VERSION: - return 1; - case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _INIT_CALL_PY_EXACT_ARGS: - return oparg + 2; - case _INSERT: - return oparg + 1; - case _IS_NONE: - return 1; - case _ITER_CHECK_LIST: - return 1; - case _ITER_CHECK_RANGE: - return 1; - case _ITER_CHECK_TUPLE: - return 1; - case _ITER_JUMP_LIST: - return 1; - case _ITER_JUMP_RANGE: - return 1; - case _ITER_JUMP_TUPLE: - return 1; - case _ITER_NEXT_LIST: - return 1; - case _ITER_NEXT_RANGE: - return 1; - case _ITER_NEXT_TUPLE: - return 1; - case _JUMP_TO_TOP: - return 0; - case _LOAD_ATTR: - return 1; - case _LOAD_ATTR_CLASS: - return 1; - case _LOAD_ATTR_INSTANCE_VALUE: - return 1; - case _LOAD_ATTR_METHOD_LAZY_DICT: - return 1; - case _LOAD_ATTR_METHOD_NO_DICT: - return 1; - case _LOAD_ATTR_METHOD_WITH_VALUES: - return 1; - case _LOAD_ATTR_MODULE: - return 1; - case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: - return 1; - case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: - return 1; - case _LOAD_ATTR_SLOT: - return 1; - case _LOAD_ATTR_WITH_HINT: - return 1; - case _LOAD_GLOBAL: - return 0; - case _LOAD_GLOBAL_BUILTINS: - return 0; - case _LOAD_GLOBAL_MODULE: - return 0; - case _LOAD_SUPER_ATTR: - return 3; - case _POP_FRAME: - return 1; - case _POP_JUMP_IF_FALSE: - return 1; - case _POP_JUMP_IF_TRUE: - return 1; - case _PUSH_FRAME: - return 1; - case _SAVE_RETURN_OFFSET: - return 0; - case _SEND: - return 2; - case _SET_IP: - return 0; - case _SPECIALIZE_BINARY_OP: - return 2; - case _SPECIALIZE_BINARY_SUBSCR: - return 2; - case _SPECIALIZE_CALL: - return oparg + 2; - case _SPECIALIZE_COMPARE_OP: - return 2; - case _SPECIALIZE_FOR_ITER: - return 1; - case _SPECIALIZE_LOAD_ATTR: - return 1; - case _SPECIALIZE_LOAD_GLOBAL: - return 0; - case _SPECIALIZE_LOAD_SUPER_ATTR: - return 3; - case _SPECIALIZE_SEND: - return 2; - case _SPECIALIZE_STORE_ATTR: - return 1; - case _SPECIALIZE_STORE_SUBSCR: - return 2; - case _SPECIALIZE_TO_BOOL: - return 1; - case _SPECIALIZE_UNPACK_SEQUENCE: - return 1; - case _STORE_ATTR: - return 2; - case _STORE_ATTR_INSTANCE_VALUE: - return 2; - case _STORE_ATTR_SLOT: - return 2; - case _STORE_SUBSCR: - return 3; - case _TO_BOOL: - return 1; - case _UNPACK_SEQUENCE: - return 1; default: return -1; } } -#endif // NEED_OPCODE_METADATA -extern int _PyOpcode_num_pushed(int opcode, int oparg, bool jump); +#endif + +extern int _PyOpcode_num_pushed(int opcode, int oparg); #ifdef NEED_OPCODE_METADATA -int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { +int _PyOpcode_num_pushed(int opcode, int oparg) { switch(opcode) { case BEFORE_ASYNC_WITH: return 2; @@ -728,7 +519,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case CALL_ALLOC_AND_ENTER_INIT: return 1; case CALL_BOUND_METHOD_EXACT_ARGS: - return 0; + return ((0) ? 
1 : 0); case CALL_BUILTIN_CLASS: return 1; case CALL_BUILTIN_FAST: @@ -760,7 +551,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case CALL_METHOD_DESCRIPTOR_O: return 1; case CALL_PY_EXACT_ARGS: - return 0; + return ((0) ? 1 : 0); case CALL_PY_WITH_DEFAULTS: return 1; case CALL_STR_1: @@ -788,7 +579,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case CONVERT_VALUE: return 1; case COPY: - return (oparg-1) + 2; + return 2 + (oparg-1); case COPY_FREE_VARS: return 0; case DELETE_ATTR: @@ -804,9 +595,9 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case DELETE_SUBSCR: return 0; case DICT_MERGE: - return (oparg - 1) + 4; + return 4 + (oparg - 1); case DICT_UPDATE: - return (oparg - 1) + 1; + return 1 + (oparg - 1); case END_ASYNC_FOR: return 0; case END_FOR: @@ -868,7 +659,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case INSTRUMENTED_JUMP_FORWARD: return 0; case INSTRUMENTED_LOAD_SUPER_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; + return 1 + (oparg & 1); case INSTRUMENTED_POP_JUMP_IF_FALSE: return 0; case INSTRUMENTED_POP_JUMP_IF_NONE: @@ -889,52 +680,46 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case IS_OP: return 1; - case JUMP: - return 0; case JUMP_BACKWARD: return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; case JUMP_FORWARD: return 0; - case JUMP_NO_INTERRUPT: - return 0; case LIST_APPEND: - return (oparg-1) + 1; + return 1 + (oparg-1); case LIST_EXTEND: - return (oparg-1) + 1; + return 1 + (oparg-1); case LOAD_ASSERTION_ERROR: return 1; case LOAD_ATTR: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_ATTR_CLASS: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: - return 1; + return 1 + ((0) ? 1 : 0); case LOAD_ATTR_INSTANCE_VALUE: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_ATTR_METHOD_LAZY_DICT: - return 2; + return 1 + ((1) ? 1 : 0); case LOAD_ATTR_METHOD_NO_DICT: - return 2; + return 1 + ((1) ? 1 : 0); case LOAD_ATTR_METHOD_WITH_VALUES: - return 2; + return 1 + ((1) ? 1 : 0); case LOAD_ATTR_MODULE: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: - return 1; + return 1 + ((0) ? 1 : 0); case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: - return 1; + return 1 + ((0) ? 1 : 0); case LOAD_ATTR_PROPERTY: - return 1; + return 1 + ((0) ? 1 : 0); case LOAD_ATTR_SLOT: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_ATTR_WITH_HINT: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_BUILD_CLASS: return 1; - case LOAD_CLOSURE: - return 1; case LOAD_CONST: return 1; case LOAD_DEREF: @@ -952,35 +737,27 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case LOAD_FROM_DICT_OR_GLOBALS: return 1; case LOAD_GLOBAL: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_GLOBAL_BUILTIN: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_GLOBAL_MODULE: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_LOCALS: return 1; - case LOAD_METHOD: - return (oparg & 1 ? 1 : 0) + 1; case LOAD_NAME: return 1; case LOAD_SUPER_ATTR: - return (oparg & 1 ? 1 : 0) + 1; + return 1 + (oparg & 1); case LOAD_SUPER_ATTR_ATTR: - return 1; + return 1 + ((0) ? 1 : 0); case LOAD_SUPER_ATTR_METHOD: return 2; - case LOAD_SUPER_METHOD: - return (oparg & 1 ? 1 : 0) + 1; - case LOAD_ZERO_SUPER_ATTR: - return (oparg & 1 ? 1 : 0) + 1; - case LOAD_ZERO_SUPER_METHOD: - return (oparg & 1 ? 
1 : 0) + 1; case MAKE_CELL: return 0; case MAKE_FUNCTION: return 1; case MAP_ADD: - return (oparg - 1) + 1; + return 1 + (oparg - 1); case MATCH_CLASS: return 1; case MATCH_KEYS: @@ -991,8 +768,6 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 2; case NOP: return 0; - case POP_BLOCK: - return 0; case POP_EXCEPT: return 0; case POP_JUMP_IF_FALSE: @@ -1031,18 +806,12 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 2; case SETUP_ANNOTATIONS: return 0; - case SETUP_CLEANUP: - return 0; - case SETUP_FINALLY: - return 0; - case SETUP_WITH: - return 0; case SET_ADD: - return (oparg-1) + 1; + return 1 + (oparg-1); case SET_FUNCTION_ATTRIBUTE: return 1; case SET_UPDATE: - return (oparg-1) + 1; + return 1 + (oparg-1); case STORE_ATTR: return 0; case STORE_ATTR_INSTANCE_VALUE: @@ -1057,8 +826,6 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case STORE_FAST_LOAD_FAST: return 1; - case STORE_FAST_MAYBE_NULL: - return 0; case STORE_FAST_STORE_FAST: return 0; case STORE_GLOBAL: @@ -1074,7 +841,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case STORE_SUBSCR_LIST_INT: return 0; case SWAP: - return (oparg-2) + 2; + return 2 + (oparg-2); case TO_BOOL: return 1; case TO_BOOL_ALWAYS_TRUE: @@ -1096,7 +863,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case UNARY_NOT: return 1; case UNPACK_EX: - return (oparg & 0xFF) + (oparg >> 8) + 1; + return 1 + (oparg >> 8) + (oparg & 0xFF); case UNPACK_SEQUENCE: return oparg; case UNPACK_SEQUENCE_LIST: @@ -1109,221 +876,27 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 5; case YIELD_VALUE: return 1; - case _BINARY_OP: - return 1; - case _BINARY_OP_ADD_FLOAT: - return 1; - case _BINARY_OP_ADD_INT: - return 1; - case _BINARY_OP_ADD_UNICODE: - return 1; - case _BINARY_OP_INPLACE_ADD_UNICODE: - return 0; - case _BINARY_OP_MULTIPLY_FLOAT: - return 1; - case _BINARY_OP_MULTIPLY_INT: - return 1; - case _BINARY_OP_SUBTRACT_FLOAT: - return 1; - case _BINARY_OP_SUBTRACT_INT: - return 1; - case _BINARY_SUBSCR: - return 1; - case _CALL: - return 1; - case _CHECK_ATTR_CLASS: - return 1; - case _CHECK_ATTR_METHOD_LAZY_DICT: - return 1; - case _CHECK_ATTR_MODULE: - return 1; - case _CHECK_ATTR_WITH_HINT: - return 1; - case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _CHECK_FUNCTION_EXACT_ARGS: - return oparg + 2; - case _CHECK_MANAGED_OBJECT_HAS_VALUES: - return 1; - case _CHECK_PEP_523: - return 0; - case _CHECK_STACK_SPACE: - return oparg + 2; - case _CHECK_VALIDITY: - return 0; - case _COMPARE_OP: - return 1; - case _EXIT_TRACE: - return 0; - case _FOR_ITER: - return 2; - case _FOR_ITER_TIER_TWO: - return 2; - case _GUARD_BOTH_FLOAT: - return 2; - case _GUARD_BOTH_INT: - return 2; - case _GUARD_BOTH_UNICODE: - return 2; - case _GUARD_BUILTINS_VERSION: - return 0; - case _GUARD_DORV_VALUES: - return 1; - case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: - return 1; - case _GUARD_GLOBALS_VERSION: - return 0; - case _GUARD_IS_FALSE_POP: - return 0; - case _GUARD_IS_NONE_POP: - return 0; - case _GUARD_IS_NOT_NONE_POP: - return 0; - case _GUARD_IS_TRUE_POP: - return 0; - case _GUARD_KEYS_VERSION: - return 1; - case _GUARD_NOT_EXHAUSTED_LIST: - return 1; - case _GUARD_NOT_EXHAUSTED_RANGE: - return 1; - case _GUARD_NOT_EXHAUSTED_TUPLE: - return 1; - case _GUARD_TYPE_VERSION: - return 1; - case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: - return oparg + 2; - case _INIT_CALL_PY_EXACT_ARGS: - return 1; - case _INSERT: - return oparg + 1; - case 
_IS_NONE: - return 1; - case _ITER_CHECK_LIST: - return 1; - case _ITER_CHECK_RANGE: - return 1; - case _ITER_CHECK_TUPLE: - return 1; - case _ITER_JUMP_LIST: - return 1; - case _ITER_JUMP_RANGE: - return 1; - case _ITER_JUMP_TUPLE: - return 1; - case _ITER_NEXT_LIST: - return 2; - case _ITER_NEXT_RANGE: - return 2; - case _ITER_NEXT_TUPLE: - return 2; - case _JUMP_TO_TOP: - return 0; - case _LOAD_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_ATTR_CLASS: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_ATTR_INSTANCE_VALUE: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_ATTR_METHOD_LAZY_DICT: - return 2; - case _LOAD_ATTR_METHOD_NO_DICT: - return 2; - case _LOAD_ATTR_METHOD_WITH_VALUES: - return 2; - case _LOAD_ATTR_MODULE: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: - return 1; - case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: - return 1; - case _LOAD_ATTR_SLOT: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_ATTR_WITH_HINT: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_GLOBAL: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_GLOBAL_BUILTINS: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_GLOBAL_MODULE: - return ((oparg & 1) ? 1 : 0) + 1; - case _LOAD_SUPER_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; - case _POP_FRAME: - return 0; - case _POP_JUMP_IF_FALSE: - return 0; - case _POP_JUMP_IF_TRUE: - return 0; - case _PUSH_FRAME: - return 0; - case _SAVE_RETURN_OFFSET: - return 0; - case _SEND: - return 2; - case _SET_IP: - return 0; - case _SPECIALIZE_BINARY_OP: - return 2; - case _SPECIALIZE_BINARY_SUBSCR: - return 2; - case _SPECIALIZE_CALL: - return oparg + 2; - case _SPECIALIZE_COMPARE_OP: - return 2; - case _SPECIALIZE_FOR_ITER: - return 1; - case _SPECIALIZE_LOAD_ATTR: - return 1; - case _SPECIALIZE_LOAD_GLOBAL: - return 0; - case _SPECIALIZE_LOAD_SUPER_ATTR: - return 3; - case _SPECIALIZE_SEND: - return 2; - case _SPECIALIZE_STORE_ATTR: - return 1; - case _SPECIALIZE_STORE_SUBSCR: - return 2; - case _SPECIALIZE_TO_BOOL: - return 1; - case _SPECIALIZE_UNPACK_SEQUENCE: - return 1; - case _STORE_ATTR: - return 0; - case _STORE_ATTR_INSTANCE_VALUE: - return 0; - case _STORE_ATTR_SLOT: - return 0; - case _STORE_SUBSCR: - return 0; - case _TO_BOOL: - return 1; - case _UNPACK_SEQUENCE: - return oparg; default: return -1; } } -#endif // NEED_OPCODE_METADATA + +#endif enum InstructionFormat { - INSTR_FMT_IB, - INSTR_FMT_IBC, - INSTR_FMT_IBC0, - INSTR_FMT_IBC00, - INSTR_FMT_IBC000, - INSTR_FMT_IBC0000000, - INSTR_FMT_IBC00000000, - INSTR_FMT_IX, - INSTR_FMT_IXC, - INSTR_FMT_IXC0, - INSTR_FMT_IXC00, - INSTR_FMT_IXC000, + INSTR_FMT_IB = 1, + INSTR_FMT_IBC = 2, + INSTR_FMT_IBC00 = 3, + INSTR_FMT_IBC000 = 4, + INSTR_FMT_IBC00000000 = 5, + INSTR_FMT_IX = 6, + INSTR_FMT_IXC = 7, + INSTR_FMT_IXC00 = 8, + INSTR_FMT_IXC000 = 9, }; #define IS_VALID_OPCODE(OP) \ - (((OP) >= 0) && ((OP) < OPCODE_METADATA_SIZE) && \ + (((OP) >= 0) && ((OP) < 268) && \ (_PyOpcode_opcode_metadata[(OP)].valid_entry)) #define HAS_ARG_FLAG (1) @@ -1347,17 +920,6 @@ enum InstructionFormat { #define OPCODE_HAS_ERROR(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ERROR_FLAG)) #define OPCODE_HAS_ESCAPES(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ESCAPES_FLAG)) -struct opcode_metadata { - bool valid_entry; - enum InstructionFormat instr_format; - int flags; -}; - -struct opcode_macro_expansion { - int nuops; - struct { int16_t uop; int8_t size; int8_t offset; } uops[12]; -}; - #define OPARG_FULL 0 #define OPARG_CACHE_1 1 #define OPARG_CACHE_2 2 @@ -1365,18 +927,17 @@ struct 
opcode_macro_expansion { #define OPARG_TOP 5 #define OPARG_BOTTOM 6 #define OPARG_SAVE_RETURN_OFFSET 7 +#define OPARG_REPLACED 9 -#define OPCODE_METADATA_FLAGS(OP) (_PyOpcode_opcode_metadata[(OP)].flags & (HAS_ARG_FLAG | HAS_JUMP_FLAG)) -#define SAME_OPCODE_METADATA(OP1, OP2) \ - (OPCODE_METADATA_FLAGS(OP1) == OPCODE_METADATA_FLAGS(OP2)) - -#define OPCODE_METADATA_SIZE 512 -#define OPCODE_UOP_NAME_SIZE 512 -#define OPCODE_MACRO_EXPANSION_SIZE 256 +struct opcode_metadata { + uint8_t valid_entry; + int8_t instr_format; + int16_t flags; +}; -extern const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE]; +extern const struct opcode_metadata _PyOpcode_opcode_metadata[268]; #ifdef NEED_OPCODE_METADATA -const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { +const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [BEFORE_ASYNC_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BEFORE_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1486,11 +1047,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [JUMP] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [JUMP_NO_INTERRUPT] = { true, 0, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [LIST_APPEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [LIST_EXTEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ASSERTION_ERROR] = { true, INSTR_FMT_IX, 0 }, @@ -1508,7 +1067,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_CLOSURE] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, @@ -1521,14 +1079,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [LOAD_GLOBAL_BUILTIN] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_LOCALS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | 
HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ZERO_SUPER_ATTR] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_ZERO_SUPER_METHOD] = { true, 0, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1537,7 +1091,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [MATCH_MAPPING] = { true, INSTR_FMT_IX, 0 }, [MATCH_SEQUENCE] = { true, INSTR_FMT_IX, 0 }, [NOP] = { true, INSTR_FMT_IX, 0 }, - [POP_BLOCK] = { true, 0, 0 }, [POP_EXCEPT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, @@ -1557,9 +1110,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [SEND_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [SETUP_ANNOTATIONS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [SETUP_CLEANUP] = { true, 0, HAS_ARG_FLAG }, - [SETUP_FINALLY] = { true, 0, HAS_ARG_FLAG }, - [SETUP_WITH] = { true, 0, HAS_ARG_FLAG }, [SET_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, [SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1570,7 +1120,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [STORE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG }, [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, [STORE_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [STORE_FAST_MAYBE_NULL] = { true, 0, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, [STORE_FAST_STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, [STORE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1596,110 +1145,33 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [UNPACK_SEQUENCE_TWO_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [WITH_EXCEPT_START] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_BINARY_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [_BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - [_BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [_BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - 
[_BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [_BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, 0 }, - [_BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG }, - [_BINARY_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_CALL] = { true, INSTR_FMT_IBC0, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_CHECK_ATTR_CLASS] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_CHECK_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_CHECK_ATTR_MODULE] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_CHECK_ATTR_WITH_HINT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_FUNCTION_EXACT_ARGS] = { true, INSTR_FMT_IBC0, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_MANAGED_OBJECT_HAS_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_CHECK_PEP_523] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_CHECK_STACK_SPACE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_CHECK_VALIDITY] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_COMPARE_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_EXIT_TRACE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_FOR_ITER] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_FOR_ITER_TIER_TWO] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_GUARD_BOTH_FLOAT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_BOTH_INT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_BOTH_UNICODE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_BUILTINS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [_GUARD_DORV_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_GLOBALS_VERSION] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG }, - [_GUARD_IS_FALSE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_IS_NONE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_IS_NOT_NONE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_IS_TRUE_POP] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_KEYS_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_GUARD_NOT_EXHAUSTED_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_NOT_EXHAUSTED_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_NOT_EXHAUSTED_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_GUARD_TYPE_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, - [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_INIT_CALL_PY_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_INSERT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_IS_NONE] = { true, INSTR_FMT_IX, 0 }, - [_ITER_CHECK_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_CHECK_RANGE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_CHECK_TUPLE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, - [_ITER_JUMP_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_ITER_JUMP_RANGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_ITER_JUMP_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_ITER_NEXT_LIST] = { true, INSTR_FMT_IX, 0 }, - [_ITER_NEXT_RANGE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_ITER_NEXT_TUPLE] = { true, INSTR_FMT_IX, 0 }, - [_JUMP_TO_TOP] = { true, INSTR_FMT_IX, HAS_EVAL_BREAK_FLAG }, - [_LOAD_ATTR] 
= { true, INSTR_FMT_IBC0000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, - [_LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, - [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, - [_LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_GLOBAL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_LOAD_GLOBAL_BUILTINS] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_POP_FRAME] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [_PUSH_FRAME] = { true, INSTR_FMT_IX, 0 }, - [_SAVE_RETURN_OFFSET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, - [_SEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_SET_IP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_SPECIALIZE_CALL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_LOAD_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_LOAD_GLOBAL] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_SEND] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_SPECIALIZE_STORE_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ESCAPES_FLAG }, - [_SPECIALIZE_STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_SPECIALIZE_TO_BOOL] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_SPECIALIZE_UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, - [_STORE_ATTR] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC, HAS_ESCAPES_FLAG }, - [_STORE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_TO_BOOL] = { true, INSTR_FMT_IXC0, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [_UNPACK_SEQUENCE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | 
HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_NO_INTERRUPT] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [LOAD_CLOSURE] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_METHOD] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_SUPER_METHOD] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ZERO_SUPER_ATTR] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_ZERO_SUPER_METHOD] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [POP_BLOCK] = { true, -1, 0 }, + [SETUP_CLEANUP] = { true, -1, HAS_ARG_FLAG }, + [SETUP_FINALLY] = { true, -1, HAS_ARG_FLAG }, + [SETUP_WITH] = { true, -1, HAS_ARG_FLAG }, + [STORE_FAST_MAYBE_NULL] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, }; -#endif // NEED_OPCODE_METADATA +#endif + +#define MAX_UOP_PER_EXPANSION 8 +struct opcode_macro_expansion { + int nuops; + struct { int16_t uop; int8_t size; int8_t offset; } uops[MAX_UOP_PER_EXPANSION]; +}; +extern const struct opcode_macro_expansion _PyOpcode_macro_expansion[256]; -extern const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE]; #ifdef NEED_OPCODE_METADATA -const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE] = { - [BEFORE_ASYNC_WITH] = { .nuops = 1, .uops = { { BEFORE_ASYNC_WITH, 0, 0 } } }, - [BEFORE_WITH] = { .nuops = 1, .uops = { { BEFORE_WITH, 0, 0 } } }, +const struct opcode_macro_expansion +_PyOpcode_macro_expansion[256] = { + [BEFORE_ASYNC_WITH] = { .nuops = 1, .uops = { { _BEFORE_ASYNC_WITH, 0, 0 } } }, + [BEFORE_WITH] = { .nuops = 1, .uops = { { _BEFORE_WITH, 0, 0 } } }, [BINARY_OP] = { .nuops = 1, .uops = { { _BINARY_OP, 0, 0 } } }, [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, [BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } }, @@ -1708,73 +1180,73 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } }, [BINARY_OP_SUBTRACT_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_SUBTRACT_FLOAT, 0, 0 } } }, [BINARY_OP_SUBTRACT_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_SUBTRACT_INT, 0, 0 } } }, - [BINARY_SLICE] = { .nuops = 1, .uops = { { BINARY_SLICE, 0, 0 } } }, + [BINARY_SLICE] = { .nuops = 1, .uops = { { _BINARY_SLICE, 0, 0 } } }, [BINARY_SUBSCR] = { .nuops = 1, .uops = { { _BINARY_SUBSCR, 0, 0 } } }, - [BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_DICT, 0, 0 } } }, - [BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_LIST_INT, 0, 0 } } }, - [BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_STR_INT, 0, 0 } } }, - [BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_TUPLE_INT, 0, 0 } } }, - [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { BUILD_CONST_KEY_MAP, 0, 0 } } }, - [BUILD_LIST] = { .nuops = 1, .uops = { { BUILD_LIST, 0, 0 } } }, - [BUILD_MAP] = { .nuops = 1, .uops = { { BUILD_MAP, 0, 0 } } }, - [BUILD_SET] = { .nuops = 1, .uops = { { BUILD_SET, 0, 0 } } }, - [BUILD_SLICE] = { .nuops = 1, .uops = { { BUILD_SLICE, 0, 0 } } }, - [BUILD_STRING] = { .nuops = 1, .uops = { { BUILD_STRING, 0, 0 } } }, - [BUILD_TUPLE] = { .nuops = 1, .uops = { { BUILD_TUPLE, 0, 0 } } }, + 
[BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_DICT, 0, 0 } } }, + [BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_LIST_INT, 0, 0 } } }, + [BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_STR_INT, 0, 0 } } }, + [BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_TUPLE_INT, 0, 0 } } }, + [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { _BUILD_CONST_KEY_MAP, 0, 0 } } }, + [BUILD_LIST] = { .nuops = 1, .uops = { { _BUILD_LIST, 0, 0 } } }, + [BUILD_MAP] = { .nuops = 1, .uops = { { _BUILD_MAP, 0, 0 } } }, + [BUILD_SET] = { .nuops = 1, .uops = { { _BUILD_SET, 0, 0 } } }, + [BUILD_SLICE] = { .nuops = 1, .uops = { { _BUILD_SLICE, 0, 0 } } }, + [BUILD_STRING] = { .nuops = 1, .uops = { { _BUILD_STRING, 0, 0 } } }, + [BUILD_TUPLE] = { .nuops = 1, .uops = { { _BUILD_TUPLE, 0, 0 } } }, [CALL_BOUND_METHOD_EXACT_ARGS] = { .nuops = 8, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _INIT_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, - [CALL_BUILTIN_CLASS] = { .nuops = 1, .uops = { { CALL_BUILTIN_CLASS, 0, 0 } } }, - [CALL_BUILTIN_FAST] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST, 0, 0 } } }, - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_BUILTIN_FAST_WITH_KEYWORDS, 0, 0 } } }, - [CALL_BUILTIN_O] = { .nuops = 1, .uops = { { CALL_BUILTIN_O, 0, 0 } } }, - [CALL_INTRINSIC_1] = { .nuops = 1, .uops = { { CALL_INTRINSIC_1, 0, 0 } } }, - [CALL_INTRINSIC_2] = { .nuops = 1, .uops = { { CALL_INTRINSIC_2, 0, 0 } } }, - [CALL_ISINSTANCE] = { .nuops = 1, .uops = { { CALL_ISINSTANCE, 0, 0 } } }, - [CALL_LEN] = { .nuops = 1, .uops = { { CALL_LEN, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_FAST] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_NOARGS] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_NOARGS, 0, 0 } } }, - [CALL_METHOD_DESCRIPTOR_O] = { .nuops = 1, .uops = { { CALL_METHOD_DESCRIPTOR_O, 0, 0 } } }, + [CALL_BUILTIN_CLASS] = { .nuops = 1, .uops = { { _CALL_BUILTIN_CLASS, 0, 0 } } }, + [CALL_BUILTIN_FAST] = { .nuops = 1, .uops = { { _CALL_BUILTIN_FAST, 0, 0 } } }, + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { _CALL_BUILTIN_FAST_WITH_KEYWORDS, 0, 0 } } }, + [CALL_BUILTIN_O] = { .nuops = 1, .uops = { { _CALL_BUILTIN_O, 0, 0 } } }, + [CALL_INTRINSIC_1] = { .nuops = 1, .uops = { { _CALL_INTRINSIC_1, 0, 0 } } }, + [CALL_INTRINSIC_2] = { .nuops = 1, .uops = { { _CALL_INTRINSIC_2, 0, 0 } } }, + [CALL_ISINSTANCE] = { .nuops = 1, .uops = { { _CALL_ISINSTANCE, 0, 0 } } }, + [CALL_LEN] = { .nuops = 1, .uops = { { _CALL_LEN, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_FAST] = { .nuops = 1, .uops = { { _CALL_METHOD_DESCRIPTOR_FAST, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { .nuops = 1, .uops = { { _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_NOARGS] = { .nuops = 1, .uops = { { _CALL_METHOD_DESCRIPTOR_NOARGS, 0, 0 } } }, + [CALL_METHOD_DESCRIPTOR_O] = { .nuops = 1, .uops = { { _CALL_METHOD_DESCRIPTOR_O, 0, 0 } } }, [CALL_PY_EXACT_ARGS] = { .nuops = 6, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { 
_SAVE_RETURN_OFFSET, 7, 3 }, { _PUSH_FRAME, 0, 0 } } }, - [CALL_STR_1] = { .nuops = 1, .uops = { { CALL_STR_1, 0, 0 } } }, - [CALL_TUPLE_1] = { .nuops = 1, .uops = { { CALL_TUPLE_1, 0, 0 } } }, - [CALL_TYPE_1] = { .nuops = 1, .uops = { { CALL_TYPE_1, 0, 0 } } }, - [CHECK_EG_MATCH] = { .nuops = 1, .uops = { { CHECK_EG_MATCH, 0, 0 } } }, - [CHECK_EXC_MATCH] = { .nuops = 1, .uops = { { CHECK_EXC_MATCH, 0, 0 } } }, + [CALL_STR_1] = { .nuops = 1, .uops = { { _CALL_STR_1, 0, 0 } } }, + [CALL_TUPLE_1] = { .nuops = 1, .uops = { { _CALL_TUPLE_1, 0, 0 } } }, + [CALL_TYPE_1] = { .nuops = 1, .uops = { { _CALL_TYPE_1, 0, 0 } } }, + [CHECK_EG_MATCH] = { .nuops = 1, .uops = { { _CHECK_EG_MATCH, 0, 0 } } }, + [CHECK_EXC_MATCH] = { .nuops = 1, .uops = { { _CHECK_EXC_MATCH, 0, 0 } } }, [COMPARE_OP] = { .nuops = 1, .uops = { { _COMPARE_OP, 0, 0 } } }, - [COMPARE_OP_FLOAT] = { .nuops = 1, .uops = { { COMPARE_OP_FLOAT, 0, 0 } } }, - [COMPARE_OP_INT] = { .nuops = 1, .uops = { { COMPARE_OP_INT, 0, 0 } } }, - [COMPARE_OP_STR] = { .nuops = 1, .uops = { { COMPARE_OP_STR, 0, 0 } } }, - [CONTAINS_OP] = { .nuops = 1, .uops = { { CONTAINS_OP, 0, 0 } } }, - [CONVERT_VALUE] = { .nuops = 1, .uops = { { CONVERT_VALUE, 0, 0 } } }, - [COPY] = { .nuops = 1, .uops = { { COPY, 0, 0 } } }, - [COPY_FREE_VARS] = { .nuops = 1, .uops = { { COPY_FREE_VARS, 0, 0 } } }, - [DELETE_ATTR] = { .nuops = 1, .uops = { { DELETE_ATTR, 0, 0 } } }, - [DELETE_DEREF] = { .nuops = 1, .uops = { { DELETE_DEREF, 0, 0 } } }, - [DELETE_FAST] = { .nuops = 1, .uops = { { DELETE_FAST, 0, 0 } } }, - [DELETE_GLOBAL] = { .nuops = 1, .uops = { { DELETE_GLOBAL, 0, 0 } } }, - [DELETE_NAME] = { .nuops = 1, .uops = { { DELETE_NAME, 0, 0 } } }, - [DELETE_SUBSCR] = { .nuops = 1, .uops = { { DELETE_SUBSCR, 0, 0 } } }, - [DICT_MERGE] = { .nuops = 1, .uops = { { DICT_MERGE, 0, 0 } } }, - [DICT_UPDATE] = { .nuops = 1, .uops = { { DICT_UPDATE, 0, 0 } } }, - [END_FOR] = { .nuops = 2, .uops = { { POP_TOP, 0, 0 }, { POP_TOP, 0, 0 } } }, - [END_SEND] = { .nuops = 1, .uops = { { END_SEND, 0, 0 } } }, - [EXIT_INIT_CHECK] = { .nuops = 1, .uops = { { EXIT_INIT_CHECK, 0, 0 } } }, - [FORMAT_SIMPLE] = { .nuops = 1, .uops = { { FORMAT_SIMPLE, 0, 0 } } }, - [FORMAT_WITH_SPEC] = { .nuops = 1, .uops = { { FORMAT_WITH_SPEC, 0, 0 } } }, - [FOR_ITER] = { .nuops = 1, .uops = { { _FOR_ITER, 0, 0 } } }, - [FOR_ITER_LIST] = { .nuops = 3, .uops = { { _ITER_CHECK_LIST, 0, 0 }, { _ITER_JUMP_LIST, 0, 0 }, { _ITER_NEXT_LIST, 0, 0 } } }, - [FOR_ITER_RANGE] = { .nuops = 3, .uops = { { _ITER_CHECK_RANGE, 0, 0 }, { _ITER_JUMP_RANGE, 0, 0 }, { _ITER_NEXT_RANGE, 0, 0 } } }, - [FOR_ITER_TUPLE] = { .nuops = 3, .uops = { { _ITER_CHECK_TUPLE, 0, 0 }, { _ITER_JUMP_TUPLE, 0, 0 }, { _ITER_NEXT_TUPLE, 0, 0 } } }, - [GET_AITER] = { .nuops = 1, .uops = { { GET_AITER, 0, 0 } } }, - [GET_ANEXT] = { .nuops = 1, .uops = { { GET_ANEXT, 0, 0 } } }, - [GET_AWAITABLE] = { .nuops = 1, .uops = { { GET_AWAITABLE, 0, 0 } } }, - [GET_ITER] = { .nuops = 1, .uops = { { GET_ITER, 0, 0 } } }, - [GET_LEN] = { .nuops = 1, .uops = { { GET_LEN, 0, 0 } } }, - [GET_YIELD_FROM_ITER] = { .nuops = 1, .uops = { { GET_YIELD_FROM_ITER, 0, 0 } } }, - [IS_OP] = { .nuops = 1, .uops = { { IS_OP, 0, 0 } } }, - [LIST_APPEND] = { .nuops = 1, .uops = { { LIST_APPEND, 0, 0 } } }, - [LIST_EXTEND] = { .nuops = 1, .uops = { { LIST_EXTEND, 0, 0 } } }, - [LOAD_ASSERTION_ERROR] = { .nuops = 1, .uops = { { LOAD_ASSERTION_ERROR, 0, 0 } } }, + [COMPARE_OP_FLOAT] = { .nuops = 1, .uops = { { _COMPARE_OP_FLOAT, 0, 0 } } }, + [COMPARE_OP_INT] = { .nuops = 1, .uops 
= { { _COMPARE_OP_INT, 0, 0 } } }, + [COMPARE_OP_STR] = { .nuops = 1, .uops = { { _COMPARE_OP_STR, 0, 0 } } }, + [CONTAINS_OP] = { .nuops = 1, .uops = { { _CONTAINS_OP, 0, 0 } } }, + [CONVERT_VALUE] = { .nuops = 1, .uops = { { _CONVERT_VALUE, 0, 0 } } }, + [COPY] = { .nuops = 1, .uops = { { _COPY, 0, 0 } } }, + [COPY_FREE_VARS] = { .nuops = 1, .uops = { { _COPY_FREE_VARS, 0, 0 } } }, + [DELETE_ATTR] = { .nuops = 1, .uops = { { _DELETE_ATTR, 0, 0 } } }, + [DELETE_DEREF] = { .nuops = 1, .uops = { { _DELETE_DEREF, 0, 0 } } }, + [DELETE_FAST] = { .nuops = 1, .uops = { { _DELETE_FAST, 0, 0 } } }, + [DELETE_GLOBAL] = { .nuops = 1, .uops = { { _DELETE_GLOBAL, 0, 0 } } }, + [DELETE_NAME] = { .nuops = 1, .uops = { { _DELETE_NAME, 0, 0 } } }, + [DELETE_SUBSCR] = { .nuops = 1, .uops = { { _DELETE_SUBSCR, 0, 0 } } }, + [DICT_MERGE] = { .nuops = 1, .uops = { { _DICT_MERGE, 0, 0 } } }, + [DICT_UPDATE] = { .nuops = 1, .uops = { { _DICT_UPDATE, 0, 0 } } }, + [END_FOR] = { .nuops = 2, .uops = { { _POP_TOP, 0, 0 }, { _POP_TOP, 0, 0 } } }, + [END_SEND] = { .nuops = 1, .uops = { { _END_SEND, 0, 0 } } }, + [EXIT_INIT_CHECK] = { .nuops = 1, .uops = { { _EXIT_INIT_CHECK, 0, 0 } } }, + [FORMAT_SIMPLE] = { .nuops = 1, .uops = { { _FORMAT_SIMPLE, 0, 0 } } }, + [FORMAT_WITH_SPEC] = { .nuops = 1, .uops = { { _FORMAT_WITH_SPEC, 0, 0 } } }, + [FOR_ITER] = { .nuops = 1, .uops = { { _FOR_ITER, 9, 0 } } }, + [FOR_ITER_LIST] = { .nuops = 3, .uops = { { _ITER_CHECK_LIST, 0, 0 }, { _ITER_JUMP_LIST, 9, 1 }, { _ITER_NEXT_LIST, 0, 0 } } }, + [FOR_ITER_RANGE] = { .nuops = 3, .uops = { { _ITER_CHECK_RANGE, 0, 0 }, { _ITER_JUMP_RANGE, 9, 1 }, { _ITER_NEXT_RANGE, 0, 0 } } }, + [FOR_ITER_TUPLE] = { .nuops = 3, .uops = { { _ITER_CHECK_TUPLE, 0, 0 }, { _ITER_JUMP_TUPLE, 9, 1 }, { _ITER_NEXT_TUPLE, 0, 0 } } }, + [GET_AITER] = { .nuops = 1, .uops = { { _GET_AITER, 0, 0 } } }, + [GET_ANEXT] = { .nuops = 1, .uops = { { _GET_ANEXT, 0, 0 } } }, + [GET_AWAITABLE] = { .nuops = 1, .uops = { { _GET_AWAITABLE, 0, 0 } } }, + [GET_ITER] = { .nuops = 1, .uops = { { _GET_ITER, 0, 0 } } }, + [GET_LEN] = { .nuops = 1, .uops = { { _GET_LEN, 0, 0 } } }, + [GET_YIELD_FROM_ITER] = { .nuops = 1, .uops = { { _GET_YIELD_FROM_ITER, 0, 0 } } }, + [IS_OP] = { .nuops = 1, .uops = { { _IS_OP, 0, 0 } } }, + [LIST_APPEND] = { .nuops = 1, .uops = { { _LIST_APPEND, 0, 0 } } }, + [LIST_EXTEND] = { .nuops = 1, .uops = { { _LIST_EXTEND, 0, 0 } } }, + [LOAD_ASSERTION_ERROR] = { .nuops = 1, .uops = { { _LOAD_ASSERTION_ERROR, 0, 0 } } }, [LOAD_ATTR] = { .nuops = 1, .uops = { { _LOAD_ATTR, 0, 0 } } }, [LOAD_ATTR_CLASS] = { .nuops = 2, .uops = { { _CHECK_ATTR_CLASS, 2, 1 }, { _LOAD_ATTR_CLASS, 4, 5 } } }, [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, @@ -1786,183 +1258,81 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, 4, 5 } } }, [LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } }, [LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 } } }, - [LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { LOAD_BUILD_CLASS, 0, 0 } } }, - [LOAD_CONST] = { .nuops = 1, .uops = { { 
LOAD_CONST, 0, 0 } } }, - [LOAD_DEREF] = { .nuops = 1, .uops = { { LOAD_DEREF, 0, 0 } } }, - [LOAD_FAST] = { .nuops = 1, .uops = { { LOAD_FAST, 0, 0 } } }, - [LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { LOAD_FAST_AND_CLEAR, 0, 0 } } }, - [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { LOAD_FAST_CHECK, 0, 0 } } }, - [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { LOAD_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, - [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, - [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, + [LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { _LOAD_BUILD_CLASS, 0, 0 } } }, + [LOAD_CONST] = { .nuops = 1, .uops = { { _LOAD_CONST, 0, 0 } } }, + [LOAD_DEREF] = { .nuops = 1, .uops = { { _LOAD_DEREF, 0, 0 } } }, + [LOAD_FAST] = { .nuops = 1, .uops = { { _LOAD_FAST, 0, 0 } } }, + [LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { _LOAD_FAST_AND_CLEAR, 0, 0 } } }, + [LOAD_FAST_CHECK] = { .nuops = 1, .uops = { { _LOAD_FAST_CHECK, 0, 0 } } }, + [LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { _LOAD_FAST, 5, 0 }, { _LOAD_FAST, 6, 0 } } }, + [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, + [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, [LOAD_GLOBAL] = { .nuops = 1, .uops = { { _LOAD_GLOBAL, 0, 0 } } }, [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, - [LOAD_LOCALS] = { .nuops = 1, .uops = { { LOAD_LOCALS, 0, 0 } } }, - [LOAD_NAME] = { .nuops = 1, .uops = { { LOAD_NAME, 0, 0 } } }, - [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, - [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, - [MAKE_CELL] = { .nuops = 1, .uops = { { MAKE_CELL, 0, 0 } } }, - [MAKE_FUNCTION] = { .nuops = 1, .uops = { { MAKE_FUNCTION, 0, 0 } } }, - [MAP_ADD] = { .nuops = 1, .uops = { { MAP_ADD, 0, 0 } } }, - [MATCH_CLASS] = { .nuops = 1, .uops = { { MATCH_CLASS, 0, 0 } } }, - [MATCH_KEYS] = { .nuops = 1, .uops = { { MATCH_KEYS, 0, 0 } } }, - [MATCH_MAPPING] = { .nuops = 1, .uops = { { MATCH_MAPPING, 0, 0 } } }, - [MATCH_SEQUENCE] = { .nuops = 1, .uops = { { MATCH_SEQUENCE, 0, 0 } } }, - [NOP] = { .nuops = 1, .uops = { { NOP, 0, 0 } } }, - [POP_EXCEPT] = { .nuops = 1, .uops = { { POP_EXCEPT, 0, 0 } } }, - [POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, 0, 0 } } }, - [POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_TRUE, 0, 0 } } }, - [POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_FALSE, 0, 0 } } }, - [POP_JUMP_IF_TRUE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_TRUE, 0, 0 } } }, - [POP_TOP] = { .nuops = 1, .uops = { { POP_TOP, 0, 0 } } }, - [PUSH_EXC_INFO] = { .nuops = 1, .uops = { { PUSH_EXC_INFO, 0, 0 } } }, - [PUSH_NULL] = { .nuops = 1, .uops = { { PUSH_NULL, 0, 0 } } }, - [RESUME_CHECK] = { .nuops = 1, .uops = { { RESUME_CHECK, 0, 0 } } }, - [RETURN_CONST] = { .nuops = 2, .uops = { { LOAD_CONST, 0, 0 }, { _POP_FRAME, 0, 0 } } }, + [LOAD_LOCALS] = { .nuops = 1, .uops = { { _LOAD_LOCALS, 0, 0 } } }, + [LOAD_NAME] = { .nuops = 1, .uops = { { _LOAD_NAME, 0, 0 } } }, + [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { _LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, + 
[LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { _LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, + [MAKE_CELL] = { .nuops = 1, .uops = { { _MAKE_CELL, 0, 0 } } }, + [MAKE_FUNCTION] = { .nuops = 1, .uops = { { _MAKE_FUNCTION, 0, 0 } } }, + [MAP_ADD] = { .nuops = 1, .uops = { { _MAP_ADD, 0, 0 } } }, + [MATCH_CLASS] = { .nuops = 1, .uops = { { _MATCH_CLASS, 0, 0 } } }, + [MATCH_KEYS] = { .nuops = 1, .uops = { { _MATCH_KEYS, 0, 0 } } }, + [MATCH_MAPPING] = { .nuops = 1, .uops = { { _MATCH_MAPPING, 0, 0 } } }, + [MATCH_SEQUENCE] = { .nuops = 1, .uops = { { _MATCH_SEQUENCE, 0, 0 } } }, + [NOP] = { .nuops = 1, .uops = { { _NOP, 0, 0 } } }, + [POP_EXCEPT] = { .nuops = 1, .uops = { { _POP_EXCEPT, 0, 0 } } }, + [POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, 9, 1 } } }, + [POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_TRUE, 9, 1 } } }, + [POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, 0, 0 }, { _POP_JUMP_IF_FALSE, 9, 1 } } }, + [POP_JUMP_IF_TRUE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_TRUE, 9, 1 } } }, + [POP_TOP] = { .nuops = 1, .uops = { { _POP_TOP, 0, 0 } } }, + [PUSH_EXC_INFO] = { .nuops = 1, .uops = { { _PUSH_EXC_INFO, 0, 0 } } }, + [PUSH_NULL] = { .nuops = 1, .uops = { { _PUSH_NULL, 0, 0 } } }, + [RESUME_CHECK] = { .nuops = 1, .uops = { { _RESUME_CHECK, 0, 0 } } }, + [RETURN_CONST] = { .nuops = 2, .uops = { { _LOAD_CONST, 0, 0 }, { _POP_FRAME, 0, 0 } } }, [RETURN_VALUE] = { .nuops = 1, .uops = { { _POP_FRAME, 0, 0 } } }, - [SETUP_ANNOTATIONS] = { .nuops = 1, .uops = { { SETUP_ANNOTATIONS, 0, 0 } } }, - [SET_ADD] = { .nuops = 1, .uops = { { SET_ADD, 0, 0 } } }, - [SET_FUNCTION_ATTRIBUTE] = { .nuops = 1, .uops = { { SET_FUNCTION_ATTRIBUTE, 0, 0 } } }, - [SET_UPDATE] = { .nuops = 1, .uops = { { SET_UPDATE, 0, 0 } } }, + [SETUP_ANNOTATIONS] = { .nuops = 1, .uops = { { _SETUP_ANNOTATIONS, 0, 0 } } }, + [SET_ADD] = { .nuops = 1, .uops = { { _SET_ADD, 0, 0 } } }, + [SET_FUNCTION_ATTRIBUTE] = { .nuops = 1, .uops = { { _SET_FUNCTION_ATTRIBUTE, 0, 0 } } }, + [SET_UPDATE] = { .nuops = 1, .uops = { { _SET_UPDATE, 0, 0 } } }, [STORE_ATTR] = { .nuops = 1, .uops = { { _STORE_ATTR, 0, 0 } } }, [STORE_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES, 0, 0 }, { _STORE_ATTR_INSTANCE_VALUE, 1, 3 } } }, [STORE_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _STORE_ATTR_SLOT, 1, 3 } } }, - [STORE_DEREF] = { .nuops = 1, .uops = { { STORE_DEREF, 0, 0 } } }, - [STORE_FAST] = { .nuops = 1, .uops = { { STORE_FAST, 0, 0 } } }, - [STORE_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { LOAD_FAST, 6, 0 } } }, - [STORE_FAST_STORE_FAST] = { .nuops = 2, .uops = { { STORE_FAST, 5, 0 }, { STORE_FAST, 6, 0 } } }, - [STORE_GLOBAL] = { .nuops = 1, .uops = { { STORE_GLOBAL, 0, 0 } } }, - [STORE_NAME] = { .nuops = 1, .uops = { { STORE_NAME, 0, 0 } } }, - [STORE_SLICE] = { .nuops = 1, .uops = { { STORE_SLICE, 0, 0 } } }, + [STORE_DEREF] = { .nuops = 1, .uops = { { _STORE_DEREF, 0, 0 } } }, + [STORE_FAST] = { .nuops = 1, .uops = { { _STORE_FAST, 0, 0 } } }, + [STORE_FAST_LOAD_FAST] = { .nuops = 2, .uops = { { _STORE_FAST, 5, 0 }, { _LOAD_FAST, 6, 0 } } }, + [STORE_FAST_STORE_FAST] = { .nuops = 2, .uops = { { _STORE_FAST, 5, 0 }, { _STORE_FAST, 6, 0 } } }, + [STORE_GLOBAL] = { .nuops = 1, .uops = { { _STORE_GLOBAL, 0, 0 } } }, + [STORE_NAME] = { .nuops = 1, .uops = { { _STORE_NAME, 0, 0 } } }, + [STORE_SLICE] = { .nuops = 1, .uops = { { _STORE_SLICE, 0, 0 } } }, [STORE_SUBSCR] = { .nuops = 
1, .uops = { { _STORE_SUBSCR, 0, 0 } } }, - [STORE_SUBSCR_DICT] = { .nuops = 1, .uops = { { STORE_SUBSCR_DICT, 0, 0 } } }, - [STORE_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { STORE_SUBSCR_LIST_INT, 0, 0 } } }, - [SWAP] = { .nuops = 1, .uops = { { SWAP, 0, 0 } } }, + [STORE_SUBSCR_DICT] = { .nuops = 1, .uops = { { _STORE_SUBSCR_DICT, 0, 0 } } }, + [STORE_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { _STORE_SUBSCR_LIST_INT, 0, 0 } } }, + [SWAP] = { .nuops = 1, .uops = { { _SWAP, 0, 0 } } }, [TO_BOOL] = { .nuops = 1, .uops = { { _TO_BOOL, 0, 0 } } }, - [TO_BOOL_ALWAYS_TRUE] = { .nuops = 1, .uops = { { TO_BOOL_ALWAYS_TRUE, 2, 1 } } }, - [TO_BOOL_BOOL] = { .nuops = 1, .uops = { { TO_BOOL_BOOL, 0, 0 } } }, - [TO_BOOL_INT] = { .nuops = 1, .uops = { { TO_BOOL_INT, 0, 0 } } }, - [TO_BOOL_LIST] = { .nuops = 1, .uops = { { TO_BOOL_LIST, 0, 0 } } }, - [TO_BOOL_NONE] = { .nuops = 1, .uops = { { TO_BOOL_NONE, 0, 0 } } }, - [TO_BOOL_STR] = { .nuops = 1, .uops = { { TO_BOOL_STR, 0, 0 } } }, - [UNARY_INVERT] = { .nuops = 1, .uops = { { UNARY_INVERT, 0, 0 } } }, - [UNARY_NEGATIVE] = { .nuops = 1, .uops = { { UNARY_NEGATIVE, 0, 0 } } }, - [UNARY_NOT] = { .nuops = 1, .uops = { { UNARY_NOT, 0, 0 } } }, - [UNPACK_EX] = { .nuops = 1, .uops = { { UNPACK_EX, 0, 0 } } }, + [TO_BOOL_ALWAYS_TRUE] = { .nuops = 1, .uops = { { _TO_BOOL_ALWAYS_TRUE, 2, 1 } } }, + [TO_BOOL_BOOL] = { .nuops = 1, .uops = { { _TO_BOOL_BOOL, 0, 0 } } }, + [TO_BOOL_INT] = { .nuops = 1, .uops = { { _TO_BOOL_INT, 0, 0 } } }, + [TO_BOOL_LIST] = { .nuops = 1, .uops = { { _TO_BOOL_LIST, 0, 0 } } }, + [TO_BOOL_NONE] = { .nuops = 1, .uops = { { _TO_BOOL_NONE, 0, 0 } } }, + [TO_BOOL_STR] = { .nuops = 1, .uops = { { _TO_BOOL_STR, 0, 0 } } }, + [UNARY_INVERT] = { .nuops = 1, .uops = { { _UNARY_INVERT, 0, 0 } } }, + [UNARY_NEGATIVE] = { .nuops = 1, .uops = { { _UNARY_NEGATIVE, 0, 0 } } }, + [UNARY_NOT] = { .nuops = 1, .uops = { { _UNARY_NOT, 0, 0 } } }, + [UNPACK_EX] = { .nuops = 1, .uops = { { _UNPACK_EX, 0, 0 } } }, [UNPACK_SEQUENCE] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE, 0, 0 } } }, - [UNPACK_SEQUENCE_LIST] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_LIST, 0, 0 } } }, - [UNPACK_SEQUENCE_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TUPLE, 0, 0 } } }, - [UNPACK_SEQUENCE_TWO_TUPLE] = { .nuops = 1, .uops = { { UNPACK_SEQUENCE_TWO_TUPLE, 0, 0 } } }, - [WITH_EXCEPT_START] = { .nuops = 1, .uops = { { WITH_EXCEPT_START, 0, 0 } } }, + [UNPACK_SEQUENCE_LIST] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE_LIST, 0, 0 } } }, + [UNPACK_SEQUENCE_TUPLE] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE_TUPLE, 0, 0 } } }, + [UNPACK_SEQUENCE_TWO_TUPLE] = { .nuops = 1, .uops = { { _UNPACK_SEQUENCE_TWO_TUPLE, 0, 0 } } }, + [WITH_EXCEPT_START] = { .nuops = 1, .uops = { { _WITH_EXCEPT_START, 0, 0 } } }, }; #endif // NEED_OPCODE_METADATA -extern const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE]; +extern const char *_PyOpcode_OpName[268]; #ifdef NEED_OPCODE_METADATA -const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] = { - [_EXIT_TRACE] = "_EXIT_TRACE", - [_SET_IP] = "_SET_IP", - [_BINARY_OP] = "_BINARY_OP", - [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", - [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", - [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", - [_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE", - [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", - [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", - [_BINARY_OP_SUBTRACT_FLOAT] = "_BINARY_OP_SUBTRACT_FLOAT", - [_BINARY_OP_SUBTRACT_INT] = 
"_BINARY_OP_SUBTRACT_INT", - [_BINARY_SUBSCR] = "_BINARY_SUBSCR", - [_CALL] = "_CALL", - [_CHECK_ATTR_CLASS] = "_CHECK_ATTR_CLASS", - [_CHECK_ATTR_METHOD_LAZY_DICT] = "_CHECK_ATTR_METHOD_LAZY_DICT", - [_CHECK_ATTR_MODULE] = "_CHECK_ATTR_MODULE", - [_CHECK_ATTR_WITH_HINT] = "_CHECK_ATTR_WITH_HINT", - [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = "_CHECK_CALL_BOUND_METHOD_EXACT_ARGS", - [_CHECK_FUNCTION_EXACT_ARGS] = "_CHECK_FUNCTION_EXACT_ARGS", - [_CHECK_MANAGED_OBJECT_HAS_VALUES] = "_CHECK_MANAGED_OBJECT_HAS_VALUES", - [_CHECK_PEP_523] = "_CHECK_PEP_523", - [_CHECK_STACK_SPACE] = "_CHECK_STACK_SPACE", - [_CHECK_VALIDITY] = "_CHECK_VALIDITY", - [_COMPARE_OP] = "_COMPARE_OP", - [_FOR_ITER] = "_FOR_ITER", - [_FOR_ITER_TIER_TWO] = "_FOR_ITER_TIER_TWO", - [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", - [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", - [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", - [_GUARD_BUILTINS_VERSION] = "_GUARD_BUILTINS_VERSION", - [_GUARD_DORV_VALUES] = "_GUARD_DORV_VALUES", - [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = "_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT", - [_GUARD_GLOBALS_VERSION] = "_GUARD_GLOBALS_VERSION", - [_GUARD_IS_FALSE_POP] = "_GUARD_IS_FALSE_POP", - [_GUARD_IS_NONE_POP] = "_GUARD_IS_NONE_POP", - [_GUARD_IS_NOT_NONE_POP] = "_GUARD_IS_NOT_NONE_POP", - [_GUARD_IS_TRUE_POP] = "_GUARD_IS_TRUE_POP", - [_GUARD_KEYS_VERSION] = "_GUARD_KEYS_VERSION", - [_GUARD_NOT_EXHAUSTED_LIST] = "_GUARD_NOT_EXHAUSTED_LIST", - [_GUARD_NOT_EXHAUSTED_RANGE] = "_GUARD_NOT_EXHAUSTED_RANGE", - [_GUARD_NOT_EXHAUSTED_TUPLE] = "_GUARD_NOT_EXHAUSTED_TUPLE", - [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", - [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", - [_INIT_CALL_PY_EXACT_ARGS] = "_INIT_CALL_PY_EXACT_ARGS", - [_INSERT] = "_INSERT", - [_IS_NONE] = "_IS_NONE", - [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", - [_ITER_CHECK_RANGE] = "_ITER_CHECK_RANGE", - [_ITER_CHECK_TUPLE] = "_ITER_CHECK_TUPLE", - [_ITER_JUMP_LIST] = "_ITER_JUMP_LIST", - [_ITER_JUMP_RANGE] = "_ITER_JUMP_RANGE", - [_ITER_JUMP_TUPLE] = "_ITER_JUMP_TUPLE", - [_ITER_NEXT_LIST] = "_ITER_NEXT_LIST", - [_ITER_NEXT_RANGE] = "_ITER_NEXT_RANGE", - [_ITER_NEXT_TUPLE] = "_ITER_NEXT_TUPLE", - [_JUMP_TO_TOP] = "_JUMP_TO_TOP", - [_LOAD_ATTR] = "_LOAD_ATTR", - [_LOAD_ATTR_CLASS] = "_LOAD_ATTR_CLASS", - [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", - [_LOAD_ATTR_METHOD_LAZY_DICT] = "_LOAD_ATTR_METHOD_LAZY_DICT", - [_LOAD_ATTR_METHOD_NO_DICT] = "_LOAD_ATTR_METHOD_NO_DICT", - [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", - [_LOAD_ATTR_MODULE] = "_LOAD_ATTR_MODULE", - [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "_LOAD_ATTR_NONDESCRIPTOR_NO_DICT", - [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", - [_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT", - [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", - [_LOAD_GLOBAL] = "_LOAD_GLOBAL", - [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", - [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", - [_LOAD_SUPER_ATTR] = "_LOAD_SUPER_ATTR", - [_POP_FRAME] = "_POP_FRAME", - [_POP_JUMP_IF_FALSE] = "_POP_JUMP_IF_FALSE", - [_POP_JUMP_IF_TRUE] = "_POP_JUMP_IF_TRUE", - [_PUSH_FRAME] = "_PUSH_FRAME", - [_SAVE_RETURN_OFFSET] = "_SAVE_RETURN_OFFSET", - [_SEND] = "_SEND", - [_SPECIALIZE_BINARY_OP] = "_SPECIALIZE_BINARY_OP", - [_SPECIALIZE_BINARY_SUBSCR] = "_SPECIALIZE_BINARY_SUBSCR", - [_SPECIALIZE_CALL] = "_SPECIALIZE_CALL", - [_SPECIALIZE_COMPARE_OP] = "_SPECIALIZE_COMPARE_OP", - [_SPECIALIZE_FOR_ITER] = "_SPECIALIZE_FOR_ITER", - [_SPECIALIZE_LOAD_ATTR] = 
"_SPECIALIZE_LOAD_ATTR", - [_SPECIALIZE_LOAD_GLOBAL] = "_SPECIALIZE_LOAD_GLOBAL", - [_SPECIALIZE_LOAD_SUPER_ATTR] = "_SPECIALIZE_LOAD_SUPER_ATTR", - [_SPECIALIZE_SEND] = "_SPECIALIZE_SEND", - [_SPECIALIZE_STORE_ATTR] = "_SPECIALIZE_STORE_ATTR", - [_SPECIALIZE_STORE_SUBSCR] = "_SPECIALIZE_STORE_SUBSCR", - [_SPECIALIZE_TO_BOOL] = "_SPECIALIZE_TO_BOOL", - [_SPECIALIZE_UNPACK_SEQUENCE] = "_SPECIALIZE_UNPACK_SEQUENCE", - [_STORE_ATTR] = "_STORE_ATTR", - [_STORE_ATTR_INSTANCE_VALUE] = "_STORE_ATTR_INSTANCE_VALUE", - [_STORE_ATTR_SLOT] = "_STORE_ATTR_SLOT", - [_STORE_SUBSCR] = "_STORE_SUBSCR", - [_TO_BOOL] = "_TO_BOOL", - [_UNPACK_SEQUENCE] = "_UNPACK_SEQUENCE", -}; -#endif // NEED_OPCODE_METADATA - -extern const char *const _PyOpcode_OpName[268]; -#ifdef NEED_OPCODE_METADATA -const char *const _PyOpcode_OpName[268] = { +const char *_PyOpcode_OpName[268] = { [BEFORE_ASYNC_WITH] = "BEFORE_ASYNC_WITH", [BEFORE_WITH] = "BEFORE_WITH", [BINARY_OP] = "BINARY_OP", @@ -2184,13 +1554,13 @@ const char *const _PyOpcode_OpName[268] = { [WITH_EXCEPT_START] = "WITH_EXCEPT_START", [YIELD_VALUE] = "YIELD_VALUE", }; -#endif // NEED_OPCODE_METADATA +#endif extern const uint8_t _PyOpcode_Caches[256]; #ifdef NEED_OPCODE_METADATA const uint8_t _PyOpcode_Caches[256] = { + [JUMP_BACKWARD] = 1, [TO_BOOL] = 3, - [BINARY_OP_INPLACE_ADD_UNICODE] = 1, [BINARY_SUBSCR] = 1, [STORE_SUBSCR] = 1, [SEND] = 1, @@ -2200,7 +1570,6 @@ const uint8_t _PyOpcode_Caches[256] = { [LOAD_SUPER_ATTR] = 1, [LOAD_ATTR] = 9, [COMPARE_OP] = 1, - [JUMP_BACKWARD] = 1, [POP_JUMP_IF_TRUE] = 1, [POP_JUMP_IF_FALSE] = 1, [POP_JUMP_IF_NONE] = 1, @@ -2209,7 +1578,7 @@ const uint8_t _PyOpcode_Caches[256] = { [CALL] = 3, [BINARY_OP] = 1, }; -#endif // NEED_OPCODE_METADATA +#endif extern const uint8_t _PyOpcode_Deopt[256]; #ifdef NEED_OPCODE_METADATA @@ -2423,6 +1792,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [WITH_EXCEPT_START] = WITH_EXCEPT_START, [YIELD_VALUE] = YIELD_VALUE, }; + #endif // NEED_OPCODE_METADATA #define EXTRA_CASES \ @@ -2475,4 +1845,40 @@ const uint8_t _PyOpcode_Deopt[256] = { case 235: \ case 255: \ ; +struct pseudo_targets { + uint8_t targets[3]; +}; +extern const struct pseudo_targets _PyOpcode_PseudoTargets[12]; +#ifdef NEED_OPCODE_METADATA +const struct pseudo_targets _PyOpcode_PseudoTargets[12] = { + [LOAD_CLOSURE-256] = { { LOAD_FAST, 0, 0 } }, + [STORE_FAST_MAYBE_NULL-256] = { { STORE_FAST, 0, 0 } }, + [LOAD_SUPER_METHOD-256] = { { LOAD_SUPER_ATTR, 0, 0 } }, + [LOAD_ZERO_SUPER_METHOD-256] = { { LOAD_SUPER_ATTR, 0, 0 } }, + [LOAD_ZERO_SUPER_ATTR-256] = { { LOAD_SUPER_ATTR, 0, 0 } }, + [LOAD_METHOD-256] = { { LOAD_ATTR, 0, 0 } }, + [JUMP-256] = { { JUMP_FORWARD, JUMP_BACKWARD, 0 } }, + [JUMP_NO_INTERRUPT-256] = { { JUMP_FORWARD, JUMP_BACKWARD_NO_INTERRUPT, 0 } }, + [SETUP_FINALLY-256] = { { NOP, 0, 0 } }, + [SETUP_CLEANUP-256] = { { NOP, 0, 0 } }, + [SETUP_WITH-256] = { { NOP, 0, 0 } }, + [POP_BLOCK-256] = { { NOP, 0, 0 } }, +}; +#endif // NEED_OPCODE_METADATA +static inline bool +is_pseudo_target(int pseudo, int target) { + if (pseudo < 256 || pseudo >= 268) { + return false; + } + for (int i = 0; _PyOpcode_PseudoTargets[pseudo-256].targets[i]; i++) { + if (_PyOpcode_PseudoTargets[pseudo-256].targets[i] == target) return true; + } + return false; +} + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_CORE_OPCODE_METADATA_H */ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index f983de56049631..c03c3d766bef61 100644 --- a/Include/internal/pycore_typeobject.h +++ 
b/Include/internal/pycore_typeobject.h @@ -133,7 +133,7 @@ _PyType_IsReady(PyTypeObject *type) extern PyObject* _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int *suppress_missing_attribute); -extern PyObject* _Py_type_getattro(PyTypeObject *type, PyObject *name); +extern PyObject* _Py_type_getattro(PyObject *type, PyObject *name); extern PyObject* _Py_BaseObject_RichCompare(PyObject* self, PyObject* other, int op); diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index c96ea51ae1acb6..4a9a00ba352d33 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -2,6 +2,7 @@ // from: // Python/bytecodes.c // Do not edit! + #ifndef Py_CORE_UOP_IDS_H #define Py_CORE_UOP_IDS_H #ifdef __cplusplus @@ -11,7 +12,6 @@ extern "C" { #define _EXIT_TRACE 300 #define _SET_IP 301 #define _NOP NOP -#define _RESUME RESUME #define _RESUME_CHECK RESUME_CHECK #define _INSTRUMENTED_RESUME INSTRUMENTED_RESUME #define _LOAD_FAST_CHECK LOAD_FAST_CHECK @@ -24,13 +24,10 @@ extern "C" { #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _POP_TOP POP_TOP #define _PUSH_NULL PUSH_NULL -#define _INSTRUMENTED_END_FOR INSTRUMENTED_END_FOR #define _END_SEND END_SEND -#define _INSTRUMENTED_END_SEND INSTRUMENTED_END_SEND #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT -#define _SPECIALIZE_TO_BOOL 302 -#define _TO_BOOL 303 +#define _TO_BOOL 302 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -38,19 +35,17 @@ extern "C" { #define _TO_BOOL_STR TO_BOOL_STR #define _TO_BOOL_ALWAYS_TRUE TO_BOOL_ALWAYS_TRUE #define _UNARY_INVERT UNARY_INVERT -#define _GUARD_BOTH_INT 304 -#define _BINARY_OP_MULTIPLY_INT 305 -#define _BINARY_OP_ADD_INT 306 -#define _BINARY_OP_SUBTRACT_INT 307 -#define _GUARD_BOTH_FLOAT 308 -#define _BINARY_OP_MULTIPLY_FLOAT 309 -#define _BINARY_OP_ADD_FLOAT 310 -#define _BINARY_OP_SUBTRACT_FLOAT 311 -#define _GUARD_BOTH_UNICODE 312 -#define _BINARY_OP_ADD_UNICODE 313 -#define _BINARY_OP_INPLACE_ADD_UNICODE 314 -#define _SPECIALIZE_BINARY_SUBSCR 315 -#define _BINARY_SUBSCR 316 +#define _GUARD_BOTH_INT 303 +#define _BINARY_OP_MULTIPLY_INT 304 +#define _BINARY_OP_ADD_INT 305 +#define _BINARY_OP_SUBTRACT_INT 306 +#define _GUARD_BOTH_FLOAT 307 +#define _BINARY_OP_MULTIPLY_FLOAT 308 +#define _BINARY_OP_ADD_FLOAT 309 +#define _BINARY_OP_SUBTRACT_FLOAT 310 +#define _GUARD_BOTH_UNICODE 311 +#define _BINARY_OP_ADD_UNICODE 312 +#define _BINARY_SUBSCR 313 #define _BINARY_SLICE BINARY_SLICE #define _STORE_SLICE STORE_SLICE #define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT @@ -60,54 +55,43 @@ extern "C" { #define _BINARY_SUBSCR_GETITEM BINARY_SUBSCR_GETITEM #define _LIST_APPEND LIST_APPEND #define _SET_ADD SET_ADD -#define _SPECIALIZE_STORE_SUBSCR 317 -#define _STORE_SUBSCR 318 +#define _STORE_SUBSCR 314 #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _DELETE_SUBSCR DELETE_SUBSCR #define _CALL_INTRINSIC_1 CALL_INTRINSIC_1 #define _CALL_INTRINSIC_2 CALL_INTRINSIC_2 -#define _RAISE_VARARGS RAISE_VARARGS -#define _INTERPRETER_EXIT INTERPRETER_EXIT -#define _POP_FRAME 319 +#define _POP_FRAME 315 #define _INSTRUMENTED_RETURN_VALUE INSTRUMENTED_RETURN_VALUE #define _INSTRUMENTED_RETURN_CONST INSTRUMENTED_RETURN_CONST #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE -#define _SPECIALIZE_SEND 320 -#define _SEND 321 +#define _SEND 316 #define _SEND_GEN SEND_GEN #define 
_INSTRUMENTED_YIELD_VALUE INSTRUMENTED_YIELD_VALUE -#define _YIELD_VALUE YIELD_VALUE #define _POP_EXCEPT POP_EXCEPT -#define _RERAISE RERAISE -#define _END_ASYNC_FOR END_ASYNC_FOR -#define _CLEANUP_THROW CLEANUP_THROW #define _LOAD_ASSERTION_ERROR LOAD_ASSERTION_ERROR #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS #define _STORE_NAME STORE_NAME #define _DELETE_NAME DELETE_NAME -#define _SPECIALIZE_UNPACK_SEQUENCE 322 -#define _UNPACK_SEQUENCE 323 +#define _UNPACK_SEQUENCE 317 #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_EX UNPACK_EX -#define _SPECIALIZE_STORE_ATTR 324 -#define _STORE_ATTR 325 +#define _STORE_ATTR 318 #define _DELETE_ATTR DELETE_ATTR #define _STORE_GLOBAL STORE_GLOBAL #define _DELETE_GLOBAL DELETE_GLOBAL #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS #define _LOAD_NAME LOAD_NAME -#define _SPECIALIZE_LOAD_GLOBAL 326 -#define _LOAD_GLOBAL 327 -#define _GUARD_GLOBALS_VERSION 328 -#define _GUARD_BUILTINS_VERSION 329 -#define _LOAD_GLOBAL_MODULE 330 -#define _LOAD_GLOBAL_BUILTINS 331 +#define _LOAD_GLOBAL 319 +#define _GUARD_GLOBALS_VERSION 320 +#define _GUARD_BUILTINS_VERSION 321 +#define _LOAD_GLOBAL_MODULE 322 +#define _LOAD_GLOBAL_BUILTINS 323 #define _DELETE_FAST DELETE_FAST #define _MAKE_CELL MAKE_CELL #define _DELETE_DEREF DELETE_DEREF @@ -128,30 +112,26 @@ extern "C" { #define _DICT_MERGE DICT_MERGE #define _MAP_ADD MAP_ADD #define _INSTRUMENTED_LOAD_SUPER_ATTR INSTRUMENTED_LOAD_SUPER_ATTR -#define _SPECIALIZE_LOAD_SUPER_ATTR 332 -#define _LOAD_SUPER_ATTR 333 #define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR #define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD -#define _SPECIALIZE_LOAD_ATTR 334 -#define _LOAD_ATTR 335 -#define _GUARD_TYPE_VERSION 336 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 337 -#define _LOAD_ATTR_INSTANCE_VALUE 338 -#define _CHECK_ATTR_MODULE 339 -#define _LOAD_ATTR_MODULE 340 -#define _CHECK_ATTR_WITH_HINT 341 -#define _LOAD_ATTR_WITH_HINT 342 -#define _LOAD_ATTR_SLOT 343 -#define _CHECK_ATTR_CLASS 344 -#define _LOAD_ATTR_CLASS 345 +#define _LOAD_ATTR 324 +#define _GUARD_TYPE_VERSION 325 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 326 +#define _LOAD_ATTR_INSTANCE_VALUE 327 +#define _CHECK_ATTR_MODULE 328 +#define _LOAD_ATTR_MODULE 329 +#define _CHECK_ATTR_WITH_HINT 330 +#define _LOAD_ATTR_WITH_HINT 331 +#define _LOAD_ATTR_SLOT 332 +#define _CHECK_ATTR_CLASS 333 +#define _LOAD_ATTR_CLASS 334 #define _LOAD_ATTR_PROPERTY LOAD_ATTR_PROPERTY #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _GUARD_DORV_VALUES 346 -#define _STORE_ATTR_INSTANCE_VALUE 347 +#define _GUARD_DORV_VALUES 335 +#define _STORE_ATTR_INSTANCE_VALUE 336 #define _STORE_ATTR_WITH_HINT STORE_ATTR_WITH_HINT -#define _STORE_ATTR_SLOT 348 -#define _SPECIALIZE_COMPARE_OP 349 -#define _COMPARE_OP 350 +#define _STORE_ATTR_SLOT 337 +#define _COMPARE_OP 338 #define _COMPARE_OP_FLOAT COMPARE_OP_FLOAT #define _COMPARE_OP_INT COMPARE_OP_INT #define _COMPARE_OP_STR COMPARE_OP_STR @@ -159,15 +139,10 @@ extern "C" { #define _CONTAINS_OP CONTAINS_OP #define _CHECK_EG_MATCH CHECK_EG_MATCH #define _CHECK_EXC_MATCH CHECK_EXC_MATCH -#define _IMPORT_NAME IMPORT_NAME -#define _IMPORT_FROM IMPORT_FROM -#define _JUMP_FORWARD JUMP_FORWARD #define _JUMP_BACKWARD JUMP_BACKWARD -#define _ENTER_EXECUTOR ENTER_EXECUTOR -#define _POP_JUMP_IF_FALSE 351 -#define _POP_JUMP_IF_TRUE 352 -#define _IS_NONE 353 
-#define _JUMP_BACKWARD_NO_INTERRUPT JUMP_BACKWARD_NO_INTERRUPT +#define _POP_JUMP_IF_FALSE 339 +#define _POP_JUMP_IF_TRUE 340 +#define _IS_NONE 341 #define _GET_LEN GET_LEN #define _MATCH_CLASS MATCH_CLASS #define _MATCH_MAPPING MATCH_MAPPING @@ -175,45 +150,43 @@ extern "C" { #define _MATCH_KEYS MATCH_KEYS #define _GET_ITER GET_ITER #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _SPECIALIZE_FOR_ITER 354 -#define _FOR_ITER 355 -#define _FOR_ITER_TIER_TWO 356 +#define _FOR_ITER 342 +#define _FOR_ITER_TIER_TWO 343 #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER -#define _ITER_CHECK_LIST 357 -#define _ITER_JUMP_LIST 358 -#define _GUARD_NOT_EXHAUSTED_LIST 359 -#define _ITER_NEXT_LIST 360 -#define _ITER_CHECK_TUPLE 361 -#define _ITER_JUMP_TUPLE 362 -#define _GUARD_NOT_EXHAUSTED_TUPLE 363 -#define _ITER_NEXT_TUPLE 364 -#define _ITER_CHECK_RANGE 365 -#define _ITER_JUMP_RANGE 366 -#define _GUARD_NOT_EXHAUSTED_RANGE 367 -#define _ITER_NEXT_RANGE 368 +#define _ITER_CHECK_LIST 344 +#define _ITER_JUMP_LIST 345 +#define _GUARD_NOT_EXHAUSTED_LIST 346 +#define _ITER_NEXT_LIST 347 +#define _ITER_CHECK_TUPLE 348 +#define _ITER_JUMP_TUPLE 349 +#define _GUARD_NOT_EXHAUSTED_TUPLE 350 +#define _ITER_NEXT_TUPLE 351 +#define _ITER_CHECK_RANGE 352 +#define _ITER_JUMP_RANGE 353 +#define _GUARD_NOT_EXHAUSTED_RANGE 354 +#define _ITER_NEXT_RANGE 355 #define _FOR_ITER_GEN FOR_ITER_GEN #define _BEFORE_ASYNC_WITH BEFORE_ASYNC_WITH #define _BEFORE_WITH BEFORE_WITH #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 369 -#define _GUARD_KEYS_VERSION 370 -#define _LOAD_ATTR_METHOD_WITH_VALUES 371 -#define _LOAD_ATTR_METHOD_NO_DICT 372 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 373 -#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 374 -#define _CHECK_ATTR_METHOD_LAZY_DICT 375 -#define _LOAD_ATTR_METHOD_LAZY_DICT 376 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 356 +#define _GUARD_KEYS_VERSION 357 +#define _LOAD_ATTR_METHOD_WITH_VALUES 358 +#define _LOAD_ATTR_METHOD_NO_DICT 359 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 360 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 361 +#define _CHECK_ATTR_METHOD_LAZY_DICT 362 +#define _LOAD_ATTR_METHOD_LAZY_DICT 363 #define _INSTRUMENTED_CALL INSTRUMENTED_CALL -#define _SPECIALIZE_CALL 377 -#define _CALL 378 -#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 379 -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 380 -#define _CHECK_PEP_523 381 -#define _CHECK_FUNCTION_EXACT_ARGS 382 -#define _CHECK_STACK_SPACE 383 -#define _INIT_CALL_PY_EXACT_ARGS 384 -#define _PUSH_FRAME 385 +#define _CALL 364 +#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 365 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 366 +#define _CHECK_PEP_523 367 +#define _CHECK_FUNCTION_EXACT_ARGS 368 +#define _CHECK_STACK_SPACE 369 +#define _INIT_CALL_PY_EXACT_ARGS 370 +#define _PUSH_FRAME 371 #define _CALL_PY_WITH_DEFAULTS CALL_PY_WITH_DEFAULTS #define _CALL_TYPE_1 CALL_TYPE_1 #define _CALL_STR_1 CALL_STR_1 @@ -226,7 +199,6 @@ extern "C" { #define _CALL_BUILTIN_FAST_WITH_KEYWORDS CALL_BUILTIN_FAST_WITH_KEYWORDS #define _CALL_LEN CALL_LEN #define _CALL_ISINSTANCE CALL_ISINSTANCE -#define _CALL_LIST_APPEND CALL_LIST_APPEND #define _CALL_METHOD_DESCRIPTOR_O CALL_METHOD_DESCRIPTOR_O #define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS #define _CALL_METHOD_DESCRIPTOR_NOARGS CALL_METHOD_DESCRIPTOR_NOARGS @@ -237,14 +209,12 @@ extern "C" { #define _CALL_FUNCTION_EX CALL_FUNCTION_EX #define _MAKE_FUNCTION MAKE_FUNCTION 
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE -#define _RETURN_GENERATOR RETURN_GENERATOR #define _BUILD_SLICE BUILD_SLICE #define _CONVERT_VALUE CONVERT_VALUE #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC #define _COPY COPY -#define _SPECIALIZE_BINARY_OP 386 -#define _BINARY_OP 387 +#define _BINARY_OP 372 #define _SWAP SWAP #define _INSTRUMENTED_INSTRUCTION INSTRUMENTED_INSTRUCTION #define _INSTRUMENTED_JUMP_FORWARD INSTRUMENTED_JUMP_FORWARD @@ -253,16 +223,17 @@ extern "C" { #define _INSTRUMENTED_POP_JUMP_IF_FALSE INSTRUMENTED_POP_JUMP_IF_FALSE #define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE #define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE -#define _GUARD_IS_TRUE_POP 388 -#define _GUARD_IS_FALSE_POP 389 -#define _GUARD_IS_NONE_POP 390 -#define _GUARD_IS_NOT_NONE_POP 391 -#define _JUMP_TO_TOP 392 -#define _SAVE_RETURN_OFFSET 393 -#define _INSERT 394 -#define _CHECK_VALIDITY 395 +#define _GUARD_IS_TRUE_POP 373 +#define _GUARD_IS_FALSE_POP 374 +#define _GUARD_IS_NONE_POP 375 +#define _GUARD_IS_NOT_NONE_POP 376 +#define _JUMP_TO_TOP 377 +#define _SAVE_RETURN_OFFSET 378 +#define _INSERT 379 +#define _CHECK_VALIDITY 380 +#define MAX_UOP_ID 380 #ifdef __cplusplus } #endif -#endif /* !Py_OPCODE_IDS_H */ +#endif /* !Py_CORE_UOP_IDS_H */ diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h new file mode 100644 index 00000000000000..300bd3baa7b377 --- /dev/null +++ b/Include/internal/pycore_uop_metadata.h @@ -0,0 +1,403 @@ +// This file is generated by Tools/cases_generator/uop_metadata_generator.py +// from: +// Python/bytecodes.c +// Do not edit! + +#ifndef Py_CORE_UOP_METADATA_H +#define Py_CORE_UOP_METADATA_H +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdint.h> +#include "pycore_uop_ids.h" +extern const uint16_t _PyUop_Flags[MAX_UOP_ID+1]; +extern const char * const _PyOpcode_uop_name[MAX_UOP_ID+1]; + +#ifdef NEED_OPCODE_METADATA +const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { + [_NOP] = 0, + [_RESUME_CHECK] = HAS_DEOPT_FLAG, + [_LOAD_FAST_CHECK] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG, + [_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_LOAD_FAST_AND_CLEAR] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_LOAD_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_LOAD_CONST] = HAS_ARG_FLAG | HAS_CONST_FLAG, + [_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_STORE_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_STORE_FAST_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_POP_TOP] = 0, + [_PUSH_NULL] = 0, + [_END_SEND] = 0, + [_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_UNARY_NOT] = 0, + [_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_TO_BOOL_BOOL] = HAS_DEOPT_FLAG, + [_TO_BOOL_INT] = HAS_DEOPT_FLAG, + [_TO_BOOL_LIST] = HAS_DEOPT_FLAG, + [_TO_BOOL_NONE] = HAS_DEOPT_FLAG, + [_TO_BOOL_STR] = HAS_DEOPT_FLAG, + [_TO_BOOL_ALWAYS_TRUE] = HAS_DEOPT_FLAG, + [_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_BOTH_INT] = HAS_DEOPT_FLAG, + [_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG, + [_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG, + [_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG, + [_GUARD_BOTH_FLOAT] = HAS_DEOPT_FLAG, + [_BINARY_OP_MULTIPLY_FLOAT] = 0, + [_BINARY_OP_ADD_FLOAT] = 0, + [_BINARY_OP_SUBTRACT_FLOAT] = 0, + [_GUARD_BOTH_UNICODE] = HAS_DEOPT_FLAG, + [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_SLICE] = 
HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG, + [_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG, + [_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG, + [_BINARY_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LIST_APPEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG, + [_SET_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_STORE_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_INTRINSIC_1] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_INTRINSIC_2] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_POP_FRAME] = HAS_ESCAPES_FLAG, + [_GET_AITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_ANEXT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_AWAITABLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_POP_EXCEPT] = HAS_ESCAPES_FLAG, + [_LOAD_ASSERTION_ERROR] = 0, + [_LOAD_BUILD_CLASS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_UNPACK_SEQUENCE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_UNPACK_SEQUENCE_TWO_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_UNPACK_SEQUENCE_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_UNPACK_SEQUENCE_LIST] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_UNPACK_EX] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_LOCALS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_FROM_DICT_OR_GLOBALS] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_GLOBALS_VERSION] = HAS_DEOPT_FLAG, + [_GUARD_BUILTINS_VERSION] = HAS_DEOPT_FLAG, + [_LOAD_GLOBAL_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_LOAD_GLOBAL_BUILTINS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_DELETE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG, + [_MAKE_CELL] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_FROM_DICT_OR_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_STORE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG, + [_COPY_FREE_VARS] = HAS_ARG_FLAG, + [_BUILD_STRING] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LIST_EXTEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_SET_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_SET] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_MAP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_SETUP_ANNOTATIONS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_CONST_KEY_MAP] = 
HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DICT_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DICT_MERGE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MAP_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_SUPER_ATTR_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_TYPE_VERSION] = HAS_DEOPT_FLAG, + [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_INSTANCE_VALUE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CHECK_ATTR_MODULE] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CHECK_ATTR_WITH_HINT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_ATTR_SLOT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CHECK_ATTR_CLASS] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_CLASS] = HAS_ARG_FLAG, + [_GUARD_DORV_VALUES] = HAS_DEOPT_FLAG, + [_STORE_ATTR_INSTANCE_VALUE] = HAS_ESCAPES_FLAG, + [_STORE_ATTR_SLOT] = HAS_ESCAPES_FLAG, + [_COMPARE_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_COMPARE_OP_FLOAT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_COMPARE_OP_INT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_COMPARE_OP_STR] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_IS_OP] = HAS_ARG_FLAG, + [_CONTAINS_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CHECK_EG_MATCH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CHECK_EXC_MATCH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_IS_NONE] = 0, + [_GET_LEN] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MATCH_CLASS] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MATCH_MAPPING] = 0, + [_MATCH_SEQUENCE] = 0, + [_MATCH_KEYS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_YIELD_FROM_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_FOR_ITER_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG, + [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG, + [_ITER_NEXT_LIST] = 0, + [_ITER_CHECK_TUPLE] = HAS_DEOPT_FLAG, + [_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_DEOPT_FLAG, + [_ITER_NEXT_TUPLE] = 0, + [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG, + [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG, + [_ITER_NEXT_RANGE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BEFORE_ASYNC_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_BEFORE_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_WITH_EXCEPT_START] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_PUSH_EXC_INFO] = 0, + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG, + [_GUARD_KEYS_VERSION] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_METHOD_WITH_VALUES] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_ATTR_METHOD_NO_DICT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = HAS_ARG_FLAG, + [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = HAS_ARG_FLAG, + [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_METHOD_LAZY_DICT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG, + [_CHECK_PEP_523] = HAS_DEOPT_FLAG, + [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_INIT_CALL_PY_EXACT_ARGS] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + 
[_PUSH_FRAME] = 0, + [_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CALL_STR_1] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_TUPLE_1] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_EXIT_INIT_CHECK] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_BUILTIN_CLASS] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, + [_CALL_BUILTIN_O] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_BUILTIN_FAST] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_BUILTIN_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_LEN] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_ISINSTANCE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_METHOD_DESCRIPTOR_O] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_METHOD_DESCRIPTOR_NOARGS] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_METHOD_DESCRIPTOR_FAST] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_SET_FUNCTION_ATTRIBUTE] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + [_BUILD_SLICE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CONVERT_VALUE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, + [_FORMAT_SIMPLE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_FORMAT_WITH_SPEC] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_COPY] = HAS_ARG_FLAG, + [_BINARY_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG, + [_SWAP] = HAS_ARG_FLAG, + [_GUARD_IS_TRUE_POP] = HAS_DEOPT_FLAG, + [_GUARD_IS_FALSE_POP] = HAS_DEOPT_FLAG, + [_GUARD_IS_NONE_POP] = HAS_DEOPT_FLAG, + [_GUARD_IS_NOT_NONE_POP] = HAS_DEOPT_FLAG, + [_JUMP_TO_TOP] = HAS_EVAL_BREAK_FLAG, + [_SET_IP] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, + [_SAVE_RETURN_OFFSET] = HAS_ARG_FLAG, + [_EXIT_TRACE] = HAS_DEOPT_FLAG, + [_INSERT] = HAS_ARG_FLAG, + [_CHECK_VALIDITY] = HAS_DEOPT_FLAG, +}; + +const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { + [_BEFORE_ASYNC_WITH] = "_BEFORE_ASYNC_WITH", + [_BEFORE_WITH] = "_BEFORE_WITH", + [_BINARY_OP] = "_BINARY_OP", + [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", + [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", + [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", + [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", + [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", + [_BINARY_OP_SUBTRACT_FLOAT] = "_BINARY_OP_SUBTRACT_FLOAT", + [_BINARY_OP_SUBTRACT_INT] = "_BINARY_OP_SUBTRACT_INT", + [_BINARY_SLICE] = "_BINARY_SLICE", + [_BINARY_SUBSCR] = "_BINARY_SUBSCR", + [_BINARY_SUBSCR_DICT] = "_BINARY_SUBSCR_DICT", + [_BINARY_SUBSCR_LIST_INT] = "_BINARY_SUBSCR_LIST_INT", + [_BINARY_SUBSCR_STR_INT] = "_BINARY_SUBSCR_STR_INT", + [_BINARY_SUBSCR_TUPLE_INT] = "_BINARY_SUBSCR_TUPLE_INT", + [_BUILD_CONST_KEY_MAP] = "_BUILD_CONST_KEY_MAP", + [_BUILD_LIST] = "_BUILD_LIST", + [_BUILD_MAP] = "_BUILD_MAP", + [_BUILD_SET] = "_BUILD_SET", + [_BUILD_SLICE] = "_BUILD_SLICE", + [_BUILD_STRING] = "_BUILD_STRING", + [_BUILD_TUPLE] = "_BUILD_TUPLE", + [_CALL_BUILTIN_CLASS] = "_CALL_BUILTIN_CLASS", + [_CALL_BUILTIN_FAST] = "_CALL_BUILTIN_FAST", + 
[_CALL_BUILTIN_FAST_WITH_KEYWORDS] = "_CALL_BUILTIN_FAST_WITH_KEYWORDS", + [_CALL_BUILTIN_O] = "_CALL_BUILTIN_O", + [_CALL_INTRINSIC_1] = "_CALL_INTRINSIC_1", + [_CALL_INTRINSIC_2] = "_CALL_INTRINSIC_2", + [_CALL_ISINSTANCE] = "_CALL_ISINSTANCE", + [_CALL_LEN] = "_CALL_LEN", + [_CALL_METHOD_DESCRIPTOR_FAST] = "_CALL_METHOD_DESCRIPTOR_FAST", + [_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS", + [_CALL_METHOD_DESCRIPTOR_NOARGS] = "_CALL_METHOD_DESCRIPTOR_NOARGS", + [_CALL_METHOD_DESCRIPTOR_O] = "_CALL_METHOD_DESCRIPTOR_O", + [_CALL_STR_1] = "_CALL_STR_1", + [_CALL_TUPLE_1] = "_CALL_TUPLE_1", + [_CALL_TYPE_1] = "_CALL_TYPE_1", + [_CHECK_ATTR_CLASS] = "_CHECK_ATTR_CLASS", + [_CHECK_ATTR_METHOD_LAZY_DICT] = "_CHECK_ATTR_METHOD_LAZY_DICT", + [_CHECK_ATTR_MODULE] = "_CHECK_ATTR_MODULE", + [_CHECK_ATTR_WITH_HINT] = "_CHECK_ATTR_WITH_HINT", + [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = "_CHECK_CALL_BOUND_METHOD_EXACT_ARGS", + [_CHECK_EG_MATCH] = "_CHECK_EG_MATCH", + [_CHECK_EXC_MATCH] = "_CHECK_EXC_MATCH", + [_CHECK_FUNCTION_EXACT_ARGS] = "_CHECK_FUNCTION_EXACT_ARGS", + [_CHECK_MANAGED_OBJECT_HAS_VALUES] = "_CHECK_MANAGED_OBJECT_HAS_VALUES", + [_CHECK_PEP_523] = "_CHECK_PEP_523", + [_CHECK_STACK_SPACE] = "_CHECK_STACK_SPACE", + [_CHECK_VALIDITY] = "_CHECK_VALIDITY", + [_COMPARE_OP] = "_COMPARE_OP", + [_COMPARE_OP_FLOAT] = "_COMPARE_OP_FLOAT", + [_COMPARE_OP_INT] = "_COMPARE_OP_INT", + [_COMPARE_OP_STR] = "_COMPARE_OP_STR", + [_CONTAINS_OP] = "_CONTAINS_OP", + [_CONVERT_VALUE] = "_CONVERT_VALUE", + [_COPY] = "_COPY", + [_COPY_FREE_VARS] = "_COPY_FREE_VARS", + [_DELETE_ATTR] = "_DELETE_ATTR", + [_DELETE_DEREF] = "_DELETE_DEREF", + [_DELETE_FAST] = "_DELETE_FAST", + [_DELETE_GLOBAL] = "_DELETE_GLOBAL", + [_DELETE_NAME] = "_DELETE_NAME", + [_DELETE_SUBSCR] = "_DELETE_SUBSCR", + [_DICT_MERGE] = "_DICT_MERGE", + [_DICT_UPDATE] = "_DICT_UPDATE", + [_END_SEND] = "_END_SEND", + [_EXIT_INIT_CHECK] = "_EXIT_INIT_CHECK", + [_EXIT_TRACE] = "_EXIT_TRACE", + [_FORMAT_SIMPLE] = "_FORMAT_SIMPLE", + [_FORMAT_WITH_SPEC] = "_FORMAT_WITH_SPEC", + [_FOR_ITER_TIER_TWO] = "_FOR_ITER_TIER_TWO", + [_GET_AITER] = "_GET_AITER", + [_GET_ANEXT] = "_GET_ANEXT", + [_GET_AWAITABLE] = "_GET_AWAITABLE", + [_GET_ITER] = "_GET_ITER", + [_GET_LEN] = "_GET_LEN", + [_GET_YIELD_FROM_ITER] = "_GET_YIELD_FROM_ITER", + [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", + [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", + [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", + [_GUARD_BUILTINS_VERSION] = "_GUARD_BUILTINS_VERSION", + [_GUARD_DORV_VALUES] = "_GUARD_DORV_VALUES", + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = "_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT", + [_GUARD_GLOBALS_VERSION] = "_GUARD_GLOBALS_VERSION", + [_GUARD_IS_FALSE_POP] = "_GUARD_IS_FALSE_POP", + [_GUARD_IS_NONE_POP] = "_GUARD_IS_NONE_POP", + [_GUARD_IS_NOT_NONE_POP] = "_GUARD_IS_NOT_NONE_POP", + [_GUARD_IS_TRUE_POP] = "_GUARD_IS_TRUE_POP", + [_GUARD_KEYS_VERSION] = "_GUARD_KEYS_VERSION", + [_GUARD_NOT_EXHAUSTED_LIST] = "_GUARD_NOT_EXHAUSTED_LIST", + [_GUARD_NOT_EXHAUSTED_RANGE] = "_GUARD_NOT_EXHAUSTED_RANGE", + [_GUARD_NOT_EXHAUSTED_TUPLE] = "_GUARD_NOT_EXHAUSTED_TUPLE", + [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", + [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", + [_INIT_CALL_PY_EXACT_ARGS] = "_INIT_CALL_PY_EXACT_ARGS", + [_INSERT] = "_INSERT", + [_IS_NONE] = "_IS_NONE", + [_IS_OP] = "_IS_OP", + [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", + [_ITER_CHECK_RANGE] = "_ITER_CHECK_RANGE", + [_ITER_CHECK_TUPLE] = "_ITER_CHECK_TUPLE", 
+ [_ITER_NEXT_LIST] = "_ITER_NEXT_LIST", + [_ITER_NEXT_RANGE] = "_ITER_NEXT_RANGE", + [_ITER_NEXT_TUPLE] = "_ITER_NEXT_TUPLE", + [_JUMP_TO_TOP] = "_JUMP_TO_TOP", + [_LIST_APPEND] = "_LIST_APPEND", + [_LIST_EXTEND] = "_LIST_EXTEND", + [_LOAD_ASSERTION_ERROR] = "_LOAD_ASSERTION_ERROR", + [_LOAD_ATTR] = "_LOAD_ATTR", + [_LOAD_ATTR_CLASS] = "_LOAD_ATTR_CLASS", + [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", + [_LOAD_ATTR_METHOD_LAZY_DICT] = "_LOAD_ATTR_METHOD_LAZY_DICT", + [_LOAD_ATTR_METHOD_NO_DICT] = "_LOAD_ATTR_METHOD_NO_DICT", + [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", + [_LOAD_ATTR_MODULE] = "_LOAD_ATTR_MODULE", + [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "_LOAD_ATTR_NONDESCRIPTOR_NO_DICT", + [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", + [_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT", + [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", + [_LOAD_BUILD_CLASS] = "_LOAD_BUILD_CLASS", + [_LOAD_CONST] = "_LOAD_CONST", + [_LOAD_DEREF] = "_LOAD_DEREF", + [_LOAD_FAST] = "_LOAD_FAST", + [_LOAD_FAST_AND_CLEAR] = "_LOAD_FAST_AND_CLEAR", + [_LOAD_FAST_CHECK] = "_LOAD_FAST_CHECK", + [_LOAD_FAST_LOAD_FAST] = "_LOAD_FAST_LOAD_FAST", + [_LOAD_FROM_DICT_OR_DEREF] = "_LOAD_FROM_DICT_OR_DEREF", + [_LOAD_FROM_DICT_OR_GLOBALS] = "_LOAD_FROM_DICT_OR_GLOBALS", + [_LOAD_GLOBAL] = "_LOAD_GLOBAL", + [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", + [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", + [_LOAD_LOCALS] = "_LOAD_LOCALS", + [_LOAD_NAME] = "_LOAD_NAME", + [_LOAD_SUPER_ATTR_ATTR] = "_LOAD_SUPER_ATTR_ATTR", + [_LOAD_SUPER_ATTR_METHOD] = "_LOAD_SUPER_ATTR_METHOD", + [_MAKE_CELL] = "_MAKE_CELL", + [_MAKE_FUNCTION] = "_MAKE_FUNCTION", + [_MAP_ADD] = "_MAP_ADD", + [_MATCH_CLASS] = "_MATCH_CLASS", + [_MATCH_KEYS] = "_MATCH_KEYS", + [_MATCH_MAPPING] = "_MATCH_MAPPING", + [_MATCH_SEQUENCE] = "_MATCH_SEQUENCE", + [_NOP] = "_NOP", + [_POP_EXCEPT] = "_POP_EXCEPT", + [_POP_FRAME] = "_POP_FRAME", + [_POP_TOP] = "_POP_TOP", + [_PUSH_EXC_INFO] = "_PUSH_EXC_INFO", + [_PUSH_FRAME] = "_PUSH_FRAME", + [_PUSH_NULL] = "_PUSH_NULL", + [_RESUME_CHECK] = "_RESUME_CHECK", + [_SAVE_RETURN_OFFSET] = "_SAVE_RETURN_OFFSET", + [_SETUP_ANNOTATIONS] = "_SETUP_ANNOTATIONS", + [_SET_ADD] = "_SET_ADD", + [_SET_FUNCTION_ATTRIBUTE] = "_SET_FUNCTION_ATTRIBUTE", + [_SET_IP] = "_SET_IP", + [_SET_UPDATE] = "_SET_UPDATE", + [_STORE_ATTR] = "_STORE_ATTR", + [_STORE_ATTR_INSTANCE_VALUE] = "_STORE_ATTR_INSTANCE_VALUE", + [_STORE_ATTR_SLOT] = "_STORE_ATTR_SLOT", + [_STORE_DEREF] = "_STORE_DEREF", + [_STORE_FAST] = "_STORE_FAST", + [_STORE_FAST_LOAD_FAST] = "_STORE_FAST_LOAD_FAST", + [_STORE_FAST_STORE_FAST] = "_STORE_FAST_STORE_FAST", + [_STORE_GLOBAL] = "_STORE_GLOBAL", + [_STORE_NAME] = "_STORE_NAME", + [_STORE_SLICE] = "_STORE_SLICE", + [_STORE_SUBSCR] = "_STORE_SUBSCR", + [_STORE_SUBSCR_DICT] = "_STORE_SUBSCR_DICT", + [_STORE_SUBSCR_LIST_INT] = "_STORE_SUBSCR_LIST_INT", + [_SWAP] = "_SWAP", + [_TO_BOOL] = "_TO_BOOL", + [_TO_BOOL_ALWAYS_TRUE] = "_TO_BOOL_ALWAYS_TRUE", + [_TO_BOOL_BOOL] = "_TO_BOOL_BOOL", + [_TO_BOOL_INT] = "_TO_BOOL_INT", + [_TO_BOOL_LIST] = "_TO_BOOL_LIST", + [_TO_BOOL_NONE] = "_TO_BOOL_NONE", + [_TO_BOOL_STR] = "_TO_BOOL_STR", + [_UNARY_INVERT] = "_UNARY_INVERT", + [_UNARY_NEGATIVE] = "_UNARY_NEGATIVE", + [_UNARY_NOT] = "_UNARY_NOT", + [_UNPACK_EX] = "_UNPACK_EX", + [_UNPACK_SEQUENCE] = "_UNPACK_SEQUENCE", + [_UNPACK_SEQUENCE_LIST] = "_UNPACK_SEQUENCE_LIST", + [_UNPACK_SEQUENCE_TUPLE] = "_UNPACK_SEQUENCE_TUPLE", + [_UNPACK_SEQUENCE_TWO_TUPLE] = 
"_UNPACK_SEQUENCE_TWO_TUPLE", + [_WITH_EXCEPT_START] = "_WITH_EXCEPT_START", +}; +#endif // NEED_OPCODE_METADATA + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_CORE_UOP_METADATA_H */ diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index e2e27ca00fd47b..fe969342ee79e7 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -231,7 +231,7 @@ extern "C" { #define SETUP_WITH 266 #define STORE_FAST_MAYBE_NULL 267 -#define HAVE_ARGUMENT 45 +#define HAVE_ARGUMENT 44 #define MIN_INSTRUMENTED_OPCODE 236 #ifdef __cplusplus diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index 5dd06ae487dfcf..fdb099bd0c2ecf 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -1,8 +1,7 @@ -# This file is generated by Tools/cases_generator/generate_cases.py +# This file is generated by Tools/cases_generator/py_metadata_generator.py # from: # Python/bytecodes.c # Do not edit! - _specializations = { "RESUME": [ "RESUME_CHECK", @@ -23,6 +22,7 @@ "BINARY_OP_ADD_FLOAT", "BINARY_OP_SUBTRACT_FLOAT", "BINARY_OP_ADD_UNICODE", + "BINARY_OP_INPLACE_ADD_UNICODE", ], "BINARY_SUBSCR": [ "BINARY_SUBSCR_DICT", @@ -103,14 +103,11 @@ ], } -# An irregular case: -_specializations["BINARY_OP"].append("BINARY_OP_INPLACE_ADD_UNICODE") - _specialized_opmap = { - 'BINARY_OP_INPLACE_ADD_UNICODE': 3, 'BINARY_OP_ADD_FLOAT': 150, 'BINARY_OP_ADD_INT': 151, 'BINARY_OP_ADD_UNICODE': 152, + 'BINARY_OP_INPLACE_ADD_UNICODE': 3, 'BINARY_OP_MULTIPLY_FLOAT': 153, 'BINARY_OP_MULTIPLY_INT': 154, 'BINARY_OP_SUBTRACT_FLOAT': 155, @@ -181,6 +178,9 @@ opmap = { 'CACHE': 0, + 'RESERVED': 17, + 'RESUME': 149, + 'INSTRUMENTED_LINE': 254, 'BEFORE_ASYNC_WITH': 1, 'BEFORE_WITH': 2, 'BINARY_SLICE': 4, @@ -196,7 +196,6 @@ 'FORMAT_SIMPLE': 14, 'FORMAT_WITH_SPEC': 15, 'GET_AITER': 16, - 'RESERVED': 17, 'GET_ANEXT': 18, 'GET_ITER': 19, 'GET_LEN': 20, @@ -298,7 +297,6 @@ 'UNPACK_EX': 116, 'UNPACK_SEQUENCE': 117, 'YIELD_VALUE': 118, - 'RESUME': 149, 'INSTRUMENTED_RESUME': 236, 'INSTRUMENTED_END_FOR': 237, 'INSTRUMENTED_END_SEND': 238, @@ -317,7 +315,6 @@ 'INSTRUMENTED_POP_JUMP_IF_FALSE': 251, 'INSTRUMENTED_POP_JUMP_IF_NONE': 252, 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 253, - 'INSTRUMENTED_LINE': 254, 'JUMP': 256, 'JUMP_NO_INTERRUPT': 257, 'LOAD_CLOSURE': 258, @@ -331,5 +328,6 @@ 'SETUP_WITH': 266, 'STORE_FAST_MAYBE_NULL': 267, } + +HAVE_ARGUMENT = 44 MIN_INSTRUMENTED_OPCODE = 236 -HAVE_ARGUMENT = 45 diff --git a/Lib/asyncio/base_subprocess.py b/Lib/asyncio/base_subprocess.py index 4c9b0dd5653c0c..6dbde2b696ad1f 100644 --- a/Lib/asyncio/base_subprocess.py +++ b/Lib/asyncio/base_subprocess.py @@ -115,7 +115,8 @@ def close(self): try: self._proc.kill() - except ProcessLookupError: + except (ProcessLookupError, PermissionError): + # the process may have already exited or may be running setuid pass # Don't clear the _proc reference yet: _post_init() may still run diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index 3eb65a8a08b5a0..cbb6527d0b28e0 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -243,13 +243,12 @@ def abort(self): The protocol's connection_lost() method will (eventually) be called with None as its argument. 
""" - self._closed = True - if self._ssl_protocol is not None: - self._ssl_protocol._abort() + self._force_close(None) def _force_close(self, exc): self._closed = True - self._ssl_protocol._abort(exc) + if self._ssl_protocol is not None: + self._ssl_protocol._abort(exc) def _test__append_write_backlog(self, data): # for test only @@ -614,7 +613,7 @@ def _start_shutdown(self): if self._app_transport is not None: self._app_transport._closed = True if self._state == SSLProtocolState.DO_HANDSHAKE: - self._abort() + self._abort(None) else: self._set_state(SSLProtocolState.FLUSHING) self._shutdown_timeout_handle = self._loop.call_later( @@ -661,10 +660,10 @@ def _on_shutdown_complete(self, shutdown_exc): else: self._loop.call_soon(self._transport.close) - def _abort(self): + def _abort(self, exc): self._set_state(SSLProtocolState.UNWRAPPED) if self._transport is not None: - self._transport.abort() + self._transport._force_close(exc) # Outgoing flow diff --git a/Lib/idlelib/idle_test/test_calltip.py b/Lib/idlelib/idle_test/test_calltip.py index 1ccb63b9dbd65f..15e1ff3f3cf717 100644 --- a/Lib/idlelib/idle_test/test_calltip.py +++ b/Lib/idlelib/idle_test/test_calltip.py @@ -7,6 +7,7 @@ import types import re from idlelib.idle_test.mock_tk import Text +from test.support import MISSING_C_DOCSTRINGS # Test Class TC is used in multiple get_argspec test methods @@ -50,6 +51,8 @@ class Get_argspecTest(unittest.TestCase): # but a red buildbot is better than a user crash (as has happened). # For a simple mismatch, change the expected output to the actual. + @unittest.skipIf(MISSING_C_DOCSTRINGS, + "Signature information for builtins requires docstrings") def test_builtins(self): def tiptest(obj, out): @@ -143,6 +146,8 @@ def f(): pass f.__doc__ = 'a'*300 self.assertEqual(get_spec(f), f"()\n{'a'*(calltip._MAX_COLS-3) + '...'}") + @unittest.skipIf(MISSING_C_DOCSTRINGS, + "Signature information for builtins requires docstrings") def test_multiline_docstring(self): # Test fewer lines than max. self.assertEqual(get_spec(range), @@ -157,6 +162,7 @@ def test_multiline_docstring(self): bytes(int) -> bytes object of size given by the parameter initialized with null bytes bytes() -> empty bytes object''') + def test_multiline_docstring_2(self): # Test more than max lines def f(): pass f.__doc__ = 'a\n' * 15 diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py index 5c09666b6a40d9..7b142e786e829e 100644 --- a/Lib/importlib/metadata/__init__.py +++ b/Lib/importlib/metadata/__init__.py @@ -3,7 +3,10 @@ import abc import csv import sys +import json import email +import types +import inspect import pathlib import zipfile import operator @@ -13,7 +16,6 @@ import itertools import posixpath import collections -import inspect from . 
import _adapters, _meta from ._collections import FreezableDefaultDict, Pair @@ -25,8 +27,7 @@ from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import List, Mapping, Optional, cast - +from typing import Iterable, List, Mapping, Optional, Set, Union, cast __all__ = [ 'Distribution', @@ -47,11 +48,11 @@ class PackageNotFoundError(ModuleNotFoundError): """The package was not found.""" - def __str__(self): + def __str__(self) -> str: return f"No package metadata was found for {self.name}" @property - def name(self): + def name(self) -> str: # type: ignore[override] (name,) = self.args return name @@ -117,38 +118,11 @@ def read(text, filter_=None): yield Pair(name, value) @staticmethod - def valid(line): + def valid(line: str): return line and not line.startswith('#') -class DeprecatedTuple: - """ - Provide subscript item access for backward compatibility. - - >>> recwarn = getfixture('recwarn') - >>> ep = EntryPoint(name='name', value='value', group='group') - >>> ep[:] - ('name', 'value', 'group') - >>> ep[0] - 'name' - >>> len(recwarn) - 1 - """ - - # Do not remove prior to 2023-05-01 or Python 3.13 - _warn = functools.partial( - warnings.warn, - "EntryPoint tuple interface is deprecated. Access members by name.", - DeprecationWarning, - stacklevel=2, - ) - - def __getitem__(self, item): - self._warn() - return self._key()[item] - - -class EntryPoint(DeprecatedTuple): +class EntryPoint: """An entry point as defined by Python packaging conventions. See `the packaging docs on entry points @@ -192,7 +166,7 @@ class EntryPoint(DeprecatedTuple): dist: Optional['Distribution'] = None - def __init__(self, name, value, group): + def __init__(self, name: str, value: str, group: str) -> None: vars(self).update(name=name, value=value, group=group) def load(self): @@ -206,18 +180,21 @@ def load(self): return functools.reduce(getattr, attrs, module) @property - def module(self): + def module(self) -> str: match = self.pattern.match(self.value) + assert match is not None return match.group('module') @property - def attr(self): + def attr(self) -> str: match = self.pattern.match(self.value) + assert match is not None return match.group('attr') @property - def extras(self): + def extras(self) -> List[str]: match = self.pattern.match(self.value) + assert match is not None return re.findall(r'\w+', match.group('extras') or '') def _for(self, dist): @@ -265,7 +242,7 @@ def __repr__(self): f'group={self.group!r})' ) - def __hash__(self): + def __hash__(self) -> int: return hash(self._key()) @@ -276,7 +253,7 @@ class EntryPoints(tuple): __slots__ = () - def __getitem__(self, name): # -> EntryPoint: + def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] """ Get the EntryPoint in self matching name. """ @@ -285,6 +262,13 @@ def __getitem__(self, name): # -> EntryPoint: except StopIteration: raise KeyError(name) + def __repr__(self): + """ + Repr with classname and tuple constructor to + signal that we deviate from regular tuple behavior. + """ + return '%s(%r)' % (self.__class__.__name__, tuple(self)) + def select(self, **params): """ Select entry points from self that match the @@ -293,14 +277,14 @@ def select(self, **params): return EntryPoints(ep for ep in self if ep.matches(**params)) @property - def names(self): + def names(self) -> Set[str]: """ Return the set of all names of all entry points. 
""" return {ep.name for ep in self} @property - def groups(self): + def groups(self) -> Set[str]: """ Return the set of all groups of all entry points. """ @@ -321,24 +305,28 @@ def _from_text(text): class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" - def read_text(self, encoding='utf-8'): + hash: Optional["FileHash"] + size: int + dist: "Distribution" + + def read_text(self, encoding: str = 'utf-8') -> str: # type: ignore[override] with self.locate().open(encoding=encoding) as stream: return stream.read() - def read_binary(self): + def read_binary(self) -> bytes: with self.locate().open('rb') as stream: return stream.read() - def locate(self): + def locate(self) -> pathlib.Path: """Return a path-like object for this path""" return self.dist.locate_file(self) class FileHash: - def __init__(self, spec): + def __init__(self, spec: str) -> None: self.mode, _, self.value = spec.partition('=') - def __repr__(self): + def __repr__(self) -> str: return f'' @@ -373,14 +361,14 @@ def read_text(self, filename) -> Optional[str]: """ @abc.abstractmethod - def locate_file(self, path): + def locate_file(self, path: Union[str, os.PathLike[str]]) -> pathlib.Path: """ Given a path to a file in this distribution, return a path to it. """ @classmethod - def from_name(cls, name: str): + def from_name(cls, name: str) -> "Distribution": """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. @@ -393,12 +381,12 @@ def from_name(cls, name: str): if not name: raise ValueError("A distribution name is required.") try: - return next(cls.discover(name=name)) + return next(iter(cls.discover(name=name))) except StopIteration: raise PackageNotFoundError(name) @classmethod - def discover(cls, **kwargs): + def discover(cls, **kwargs) -> Iterable["Distribution"]: """Return an iterable of Distribution objects for all packages. Pass a ``context`` or pass keyword arguments for constructing @@ -416,7 +404,7 @@ def discover(cls, **kwargs): ) @staticmethod - def at(path): + def at(path: Union[str, os.PathLike[str]]) -> "Distribution": """Return a Distribution for the indicated metadata path :param path: a string or path-like object @@ -451,7 +439,7 @@ def metadata(self) -> _meta.PackageMetadata: return _adapters.Message(email.message_from_string(text)) @property - def name(self): + def name(self) -> str: """Return the 'Name' metadata for the distribution package.""" return self.metadata['Name'] @@ -461,16 +449,16 @@ def _normalized_name(self): return Prepared.normalize(self.name) @property - def version(self): + def version(self) -> str: """Return the 'Version' metadata for the distribution package.""" return self.metadata['Version'] @property - def entry_points(self): + def entry_points(self) -> EntryPoints: return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) @property - def files(self): + def files(self) -> Optional[List[PackagePath]]: """Files in this distribution. 
:return: List of PackagePath for this distribution or None @@ -555,7 +543,7 @@ def _read_files_egginfo_sources(self): return text and map('"{}"'.format, text.splitlines()) @property - def requires(self): + def requires(self) -> Optional[List[str]]: """Generated requirements specified for this Distribution""" reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() return reqs and list(reqs) @@ -606,6 +594,16 @@ def url_req_space(req): space = url_req_space(section.value) yield section.value + space + quoted_marker(section.name) + @property + def origin(self): + return self._load_json('direct_url.json') + + def _load_json(self, filename): + return pass_none(json.loads)( + self.read_text(filename), + object_hook=lambda data: types.SimpleNamespace(**data), + ) + class DistributionFinder(MetaPathFinder): """ @@ -634,7 +632,7 @@ def __init__(self, **kwargs): vars(self).update(kwargs) @property - def path(self): + def path(self) -> List[str]: """ The sequence of directory path that a distribution finder should search. @@ -645,7 +643,7 @@ def path(self): return vars(self).get('path', sys.path) @abc.abstractmethod - def find_distributions(self, context=Context()): + def find_distributions(self, context=Context()) -> Iterable[Distribution]: """ Find distributions. @@ -774,7 +772,9 @@ def __bool__(self): class MetadataPathFinder(DistributionFinder): @classmethod - def find_distributions(cls, context=DistributionFinder.Context()): + def find_distributions( + cls, context=DistributionFinder.Context() + ) -> Iterable["PathDistribution"]: """ Find distributions. @@ -794,19 +794,19 @@ def _search_paths(cls, name, paths): path.search(prepared) for path in map(FastPath, paths) ) - def invalidate_caches(cls): + def invalidate_caches(cls) -> None: FastPath.__new__.cache_clear() class PathDistribution(Distribution): - def __init__(self, path: SimplePath): + def __init__(self, path: SimplePath) -> None: """Construct a distribution. :param path: SimplePath indicating the metadata directory. """ self._path = path - def read_text(self, filename): + def read_text(self, filename: Union[str, os.PathLike[str]]) -> Optional[str]: with suppress( FileNotFoundError, IsADirectoryError, @@ -816,9 +816,11 @@ def read_text(self, filename): ): return self._path.joinpath(filename).read_text(encoding='utf-8') + return None + read_text.__doc__ = Distribution.read_text.__doc__ - def locate_file(self, path): + def locate_file(self, path: Union[str, os.PathLike[str]]) -> pathlib.Path: return self._path.parent / path @property @@ -851,7 +853,7 @@ def _name_from_stem(stem): return name -def distribution(distribution_name): +def distribution(distribution_name: str) -> Distribution: """Get the ``Distribution`` instance for the named package. :param distribution_name: The name of the distribution package as a string. @@ -860,7 +862,7 @@ def distribution(distribution_name): return Distribution.from_name(distribution_name) -def distributions(**kwargs): +def distributions(**kwargs) -> Iterable[Distribution]: """Get all ``Distribution`` instances in the current environment. :return: An iterable of ``Distribution`` instances. @@ -868,7 +870,7 @@ def distributions(**kwargs): return Distribution.discover(**kwargs) -def metadata(distribution_name) -> _meta.PackageMetadata: +def metadata(distribution_name: str) -> _meta.PackageMetadata: """Get the metadata for the named package. :param distribution_name: The name of the distribution package to query. 
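[Aside: a minimal usage sketch of the module-level helpers these hunks annotate. The distribution name "wheel" is only an assumption; any installed distribution works, and a missing name raises PackageNotFoundError.]

    from importlib.metadata import files, requires, version

    print(version("wheel"))    # str, e.g. '0.42.0'
    print(requires("wheel"))   # Optional[List[str]] of requirement specifiers
    print(files("wheel"))      # Optional[List[PackagePath]]; None when not recorded
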
@@ -877,7 +879,7 @@ def metadata(distribution_name) -> _meta.PackageMetadata: return Distribution.from_name(distribution_name).metadata -def version(distribution_name): +def version(distribution_name: str) -> str: """Get the version string for the named package. :param distribution_name: The name of the distribution package to query. @@ -911,7 +913,7 @@ def entry_points(**params) -> EntryPoints: return EntryPoints(eps).select(**params) -def files(distribution_name): +def files(distribution_name: str) -> Optional[List[PackagePath]]: """Return a list of files for the named package. :param distribution_name: The name of the distribution package to query. @@ -920,11 +922,11 @@ def files(distribution_name): return distribution(distribution_name).files -def requires(distribution_name): +def requires(distribution_name: str) -> Optional[List[str]]: """ Return a list of requirements for the named package. - :return: An iterator of requirements, suitable for + :return: An iterable of requirements, suitable for packaging.requirement.Requirement. """ return distribution(distribution_name).requires @@ -951,13 +953,42 @@ def _top_level_declared(dist): return (dist.read_text('top_level.txt') or '').split() +def _topmost(name: PackagePath) -> Optional[str]: + """ + Return the top-most parent as long as there is a parent. + """ + top, *rest = name.parts + return top if rest else None + + +def _get_toplevel_name(name: PackagePath) -> str: + """ + Infer a possibly importable module name from a name presumed on + sys.path. + + >>> _get_toplevel_name(PackagePath('foo.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pyc')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo/__init__.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pth')) + 'foo.pth' + >>> _get_toplevel_name(PackagePath('foo.dist-info')) + 'foo.dist-info' + """ + return _topmost(name) or ( + # python/typeshed#10328 + inspect.getmodulename(name) # type: ignore + or str(name) + ) + + def _top_level_inferred(dist): - opt_names = { - f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f) - for f in always_iterable(dist.files) - } + opt_names = set(map(_get_toplevel_name, always_iterable(dist.files))) - @pass_none def importable_name(name): return '.' not in name diff --git a/Lib/importlib/metadata/_adapters.py b/Lib/importlib/metadata/_adapters.py index 6aed69a30857e4..591168808953ba 100644 --- a/Lib/importlib/metadata/_adapters.py +++ b/Lib/importlib/metadata/_adapters.py @@ -53,7 +53,7 @@ def __iter__(self): def __getitem__(self, item): """ Warn users that a ``KeyError`` can be expected when a - mising key is supplied. Ref python/importlib_metadata#371. + missing key is supplied. Ref python/importlib_metadata#371. """ res = super().__getitem__(item) if res is None: diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py index c9a7ef906a8a8c..f670016de7fef2 100644 --- a/Lib/importlib/metadata/_meta.py +++ b/Lib/importlib/metadata/_meta.py @@ -49,7 +49,7 @@ class SimplePath(Protocol[_T]): A minimal subset of pathlib.Path required by PathDistribution. """ - def joinpath(self) -> _T: + def joinpath(self, other: Union[str, _T]) -> _T: ... # pragma: no cover def __truediv__(self, other: Union[str, _T]) -> _T: diff --git a/Lib/importlib/metadata/diagnose.py b/Lib/importlib/metadata/diagnose.py new file mode 100644 index 00000000000000..e405471ac4d943 --- /dev/null +++ b/Lib/importlib/metadata/diagnose.py @@ -0,0 +1,21 @@ +import sys + +from . 
import Distribution + + +def inspect(path): + print("Inspecting", path) + dists = list(Distribution.discover(path=[path])) + if not dists: + return + print("Found", len(dists), "packages:", end=' ') + print(', '.join(dist.name for dist in dists)) + + +def run(): + for path in sys.path: + inspect(path) + + +if __name__ == '__main__': + run() diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py index 28c77df1c32ea8..32871850ddec8b 100644 --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -43,19 +43,19 @@ def sub_debug(msg, *args): if _logger: - _logger.log(SUBDEBUG, msg, *args) + _logger.log(SUBDEBUG, msg, *args, stacklevel=2) def debug(msg, *args): if _logger: - _logger.log(DEBUG, msg, *args) + _logger.log(DEBUG, msg, *args, stacklevel=2) def info(msg, *args): if _logger: - _logger.log(INFO, msg, *args) + _logger.log(INFO, msg, *args, stacklevel=2) def sub_warning(msg, *args): if _logger: - _logger.log(SUBWARNING, msg, *args) + _logger.log(SUBWARNING, msg, *args, stacklevel=2) def get_logger(): ''' diff --git a/Lib/os.py b/Lib/os.py index 8c4b93250918eb..7f38e14e7bdd96 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -131,6 +131,7 @@ def _add(str, fn): _set = set() _add("HAVE_FCHDIR", "chdir") _add("HAVE_FCHMOD", "chmod") + _add("MS_WINDOWS", "chmod") _add("HAVE_FCHOWN", "chown") _add("HAVE_FDOPENDIR", "listdir") _add("HAVE_FDOPENDIR", "scandir") diff --git a/Lib/pathlib/__init__.py b/Lib/pathlib/__init__.py index b020d2db350da8..bfd2a924979746 100644 --- a/Lib/pathlib/__init__.py +++ b/Lib/pathlib/__init__.py @@ -59,6 +59,7 @@ class PurePath(_abc.PurePathBase): # path. It's set when `__hash__()` is called for the first time. '_hash', ) + pathmod = os.path def __new__(cls, *args, **kwargs): """Construct a PurePath from one or several strings and or existing @@ -99,6 +100,9 @@ def __reduce__(self): # when pickling related paths. return (self.__class__, self.parts) + def __repr__(self): + return "{}({!r})".format(self.__class__.__name__, self.as_posix()) + def __fspath__(self): return str(self) diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 4808d0e61f7038..43e2670c4d0258 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -1,7 +1,6 @@ import functools import io import ntpath -import os import posixpath import sys import warnings @@ -204,7 +203,7 @@ class PurePathBase: # work from occurring when `resolve()` calls `stat()` or `readlink()`. 
'_resolving', ) - pathmod = os.path + pathmod = posixpath def __init__(self, *paths): self._raw_paths = paths @@ -282,9 +281,6 @@ def as_posix(self): slashes.""" return str(self).replace(self.pathmod.sep, '/') - def __repr__(self): - return "{}({!r})".format(self.__class__.__name__, self.as_posix()) - @property def drive(self): """The drive prefix (letter or UNC path), if any.""" diff --git a/Lib/pickle.py b/Lib/pickle.py index 4f5ad5b71e8899..988c0887341310 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -857,13 +857,13 @@ def save_str(self, obj): else: self.write(BINUNICODE + pack(" None: def run_cases_test(self, input: str, expected: str): with open(self.temp_input_filename, "w+") as temp_input: - temp_input.write(analysis.BEGIN_MARKER) + temp_input.write(parser.BEGIN_MARKER) temp_input.write(input) - temp_input.write(analysis.END_MARKER) + temp_input.write(parser.END_MARKER) temp_input.flush() with handle_stderr(): @@ -636,13 +621,13 @@ def test_cond_effect(self): PyObject *output = NULL; PyObject *zz; cc = stack_pointer[-1]; - if ((oparg & 1) == 1) { input = stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0))]; } - aa = stack_pointer[-2 - ((((oparg & 1) == 1) ? 1 : 0))]; + if ((oparg & 1) == 1) { input = stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)]; } + aa = stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)]; output = spam(oparg, input); - stack_pointer[-2 - ((((oparg & 1) == 1) ? 1 : 0))] = xx; - if (oparg & 2) stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0))] = output; - stack_pointer[-1 - ((((oparg & 1) == 1) ? 1 : 0)) + (((oparg & 2) ? 1 : 0))] = zz; - stack_pointer += -((((oparg & 1) == 1) ? 1 : 0)) + (((oparg & 2) ? 1 : 0)); + stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)] = xx; + if (oparg & 2) stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)] = output; + stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0) + ((oparg & 2) ? 1 : 0)] = zz; + stack_pointer += -(((oparg & 1) == 1) ? 1 : 0) + ((oparg & 2) ? 1 : 0); DISPATCH(); } """ @@ -682,8 +667,8 @@ def test_macro_cond_effect(self): } stack_pointer[-3] = deep; if (oparg) stack_pointer[-2] = extra; - stack_pointer[-2 + (((oparg) ? 1 : 0))] = res; - stack_pointer += -1 + (((oparg) ? 1 : 0)); + stack_pointer[-2 + ((oparg) ? 1 : 0)] = res; + stack_pointer += -1 + ((oparg) ? 
1 : 0); DISPATCH(); } """ diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 48c0a43f29e27f..7b0126226c4aba 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -1632,6 +1632,14 @@ def test_circular_from_import(self): str(cm.exception), ) + def test_circular_import(self): + with self.assertRaisesRegex( + AttributeError, + r"partially initialized module 'test.test_import.data.circular_imports.import_cycle' " + r"from '.*' has no attribute 'some_attribute' \(most likely due to a circular import\)" + ): + import test.test_import.data.circular_imports.import_cycle + def test_absolute_circular_submodule(self): with self.assertRaises(AttributeError) as cm: import test.test_import.data.circular_imports.subpkg2.parent diff --git a/Lib/test/test_import/data/circular_imports/import_cycle.py b/Lib/test/test_import/data/circular_imports/import_cycle.py new file mode 100644 index 00000000000000..cd9507b5f69e25 --- /dev/null +++ b/Lib/test/test_import/data/circular_imports/import_cycle.py @@ -0,0 +1,3 @@ +import test.test_import.data.circular_imports.import_cycle as m + +m.some_attribute diff --git a/Lib/test/test_importlib/_path.py b/Lib/test/test_importlib/_path.py index 71a704389b986e..25c799fa44cd55 100644 --- a/Lib/test/test_importlib/_path.py +++ b/Lib/test/test_importlib/_path.py @@ -1,17 +1,18 @@ -# from jaraco.path 3.5 +# from jaraco.path 3.7 import functools import pathlib -from typing import Dict, Union +from typing import Dict, Protocol, Union +from typing import runtime_checkable -try: - from typing import Protocol, runtime_checkable -except ImportError: # pragma: no cover - # Python 3.7 - from typing_extensions import Protocol, runtime_checkable # type: ignore + +class Symlink(str): + """ + A string indicating the target of a symlink. + """ -FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore +FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']] # type: ignore @runtime_checkable @@ -28,6 +29,9 @@ def write_text(self, content, **kwargs): def write_bytes(self, content): ... # pragma: no cover + def symlink_to(self, target): + ... # pragma: no cover + def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker: return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore @@ -51,12 +55,16 @@ def build( ... "__init__.py": "", ... }, ... "baz.py": "# Some code", - ... } + ... "bar.py": Symlink("baz.py"), + ... }, + ... "bing": Symlink("foo"), ... 
} >>> target = getfixture('tmp_path') >>> build(spec, target) >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') '# Some code' + >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8') + '# Some code' """ for name, contents in spec.items(): create(contents, _ensure_tree_maker(prefix) / name) @@ -79,8 +87,8 @@ def _(content: str, path): @create.register -def _(content: str, path): - path.write_text(content, encoding='utf-8') +def _(content: Symlink, path): + path.symlink_to(content) class Recording: @@ -107,3 +115,6 @@ def write_text(self, content, **kwargs): def mkdir(self, **kwargs): return + + def symlink_to(self, target): + pass diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index 64c8a5485106e3..84a0680e4ec653 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -9,6 +9,7 @@ import warnings import importlib.util import importlib +from test.support import MISSING_C_DOCSTRINGS class LoaderTests: @@ -373,7 +374,8 @@ def test_nonascii(self): with self.subTest(name): module = self.load_module_by_name(name) self.assertEqual(module.__name__, name) - self.assertEqual(module.__doc__, "Module named in %s" % lang) + if not MISSING_C_DOCSTRINGS: + self.assertEqual(module.__doc__, "Module named in %s" % lang) (Frozen_MultiPhaseExtensionModuleTests, diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index 73e5da2ba92279..8c973356b5660d 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -1,6 +1,7 @@ import os import sys import copy +import json import shutil import pathlib import tempfile @@ -86,7 +87,15 @@ def setUp(self): self.fixtures.enter_context(self.add_sys_path(self.site_dir)) -class DistInfoPkg(OnSysPath, SiteDir): +class SiteBuilder(SiteDir): + def setUp(self): + super().setUp() + for cls in self.__class__.mro(): + with contextlib.suppress(AttributeError): + build_files(cls.files, prefix=self.site_dir) + + +class DistInfoPkg(OnSysPath, SiteBuilder): files: FilesSpec = { "distinfo_pkg-1.0.0.dist-info": { "METADATA": """ @@ -113,10 +122,6 @@ def main(): """, } - def setUp(self): - super().setUp() - build_files(DistInfoPkg.files, self.site_dir) - def make_uppercase(self): """ Rewrite metadata with everything uppercase. @@ -128,7 +133,28 @@ def make_uppercase(self): build_files(files, self.site_dir) -class DistInfoPkgWithDot(OnSysPath, SiteDir): +class DistInfoPkgEditable(DistInfoPkg): + """ + Package with a PEP 660 direct_url.json. 
+ """ + + some_hash = '524127ce937f7cb65665130c695abd18ca386f60bb29687efb976faa1596fdcc' + files: FilesSpec = { + 'distinfo_pkg-1.0.0.dist-info': { + 'direct_url.json': json.dumps( + { + "archive_info": { + "hash": f"sha256={some_hash}", + "hashes": {"sha256": f"{some_hash}"}, + }, + "url": "file:///path/to/distinfo_pkg-1.0.0.editable-py3-none-any.whl", + } + ) + }, + } + + +class DistInfoPkgWithDot(OnSysPath, SiteBuilder): files: FilesSpec = { "pkg_dot-1.0.0.dist-info": { "METADATA": """ @@ -138,12 +164,8 @@ class DistInfoPkgWithDot(OnSysPath, SiteDir): }, } - def setUp(self): - super().setUp() - build_files(DistInfoPkgWithDot.files, self.site_dir) - -class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir): +class DistInfoPkgWithDotLegacy(OnSysPath, SiteBuilder): files: FilesSpec = { "pkg.dot-1.0.0.dist-info": { "METADATA": """ @@ -159,18 +181,12 @@ class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir): }, } - def setUp(self): - super().setUp() - build_files(DistInfoPkgWithDotLegacy.files, self.site_dir) - -class DistInfoPkgOffPath(SiteDir): - def setUp(self): - super().setUp() - build_files(DistInfoPkg.files, self.site_dir) +class DistInfoPkgOffPath(SiteBuilder): + files = DistInfoPkg.files -class EggInfoPkg(OnSysPath, SiteDir): +class EggInfoPkg(OnSysPath, SiteBuilder): files: FilesSpec = { "egginfo_pkg.egg-info": { "PKG-INFO": """ @@ -205,12 +221,8 @@ def main(): """, } - def setUp(self): - super().setUp() - build_files(EggInfoPkg.files, prefix=self.site_dir) - -class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteDir): +class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteBuilder): files: FilesSpec = { "egg_with_module_pkg.egg-info": { "PKG-INFO": "Name: egg_with_module-pkg", @@ -240,12 +252,8 @@ def main(): """, } - def setUp(self): - super().setUp() - build_files(EggInfoPkgPipInstalledNoToplevel.files, prefix=self.site_dir) - -class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir): +class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteBuilder): files: FilesSpec = { "egg_with_no_modules_pkg.egg-info": { "PKG-INFO": "Name: egg_with_no_modules-pkg", @@ -270,12 +278,8 @@ class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir): }, } - def setUp(self): - super().setUp() - build_files(EggInfoPkgPipInstalledNoModules.files, prefix=self.site_dir) - -class EggInfoPkgSourcesFallback(OnSysPath, SiteDir): +class EggInfoPkgSourcesFallback(OnSysPath, SiteBuilder): files: FilesSpec = { "sources_fallback_pkg.egg-info": { "PKG-INFO": "Name: sources_fallback-pkg", @@ -296,12 +300,8 @@ def main(): """, } - def setUp(self): - super().setUp() - build_files(EggInfoPkgSourcesFallback.files, prefix=self.site_dir) - -class EggInfoFile(OnSysPath, SiteDir): +class EggInfoFile(OnSysPath, SiteBuilder): files: FilesSpec = { "egginfo_file.egg-info": """ Metadata-Version: 1.0 @@ -317,10 +317,6 @@ class EggInfoFile(OnSysPath, SiteDir): """, } - def setUp(self): - super().setUp() - build_files(EggInfoFile.files, prefix=self.site_dir) - # dedent all text strings before writing orig = _path.create.registry[str] diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 3b49227255eb58..1d3817151edf64 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -12,6 +12,7 @@ from . 
import fixtures from ._context import suppress +from ._path import Symlink from importlib.metadata import ( Distribution, EntryPoint, @@ -68,7 +69,7 @@ def test_abc_enforced(self): dict(name=''), ) def test_invalid_inputs_to_from_name(self, name): - with self.assertRaises(ValueError): + with self.assertRaises(Exception): Distribution.from_name(name) @@ -207,6 +208,20 @@ def test_invalid_usage(self): with self.assertRaises(ValueError): list(distributions(context='something', name='else')) + def test_interleaved_discovery(self): + """ + Ensure interleaved searches are safe. + + When the search is cached, it is possible for searches to be + interleaved, so make sure those use-cases are safe. + + Ref #293 + """ + dists = distributions() + next(dists) + version('egginfo-pkg') + next(dists) + class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): def test_egg_info(self): @@ -388,6 +403,27 @@ def test_packages_distributions_all_module_types(self): assert not any(name.endswith('.dist-info') for name in distributions) + def test_packages_distributions_symlinked_top_level(self) -> None: + """ + Distribution is resolvable from a simple top-level symlink in RECORD. + See #452. + """ + + files: fixtures.FilesSpec = { + "symlinked_pkg-1.0.0.dist-info": { + "METADATA": """ + Name: symlinked-pkg + Version: 1.0.0 + """, + "RECORD": "symlinked,,\n", + }, + ".symlink.target": {}, + "symlinked": Symlink(".symlink.target"), + } + + fixtures.build_files(files, self.site_dir) + assert packages_distributions()['symlinked'] == ['symlinked-pkg'] + class PackagesDistributionsEggTest( fixtures.EggInfoPkg, @@ -424,3 +460,10 @@ def import_names_from_package(package_name): # sources_fallback-pkg has one import ('sources_fallback') inferred from # SOURCES.txt (top_level.txt and installed-files.txt is missing) assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'} + + +class EditableDistributionTest(fixtures.DistInfoPkgEditable, unittest.TestCase): + def test_origin(self): + dist = Distribution.from_name('distinfo-pkg') + assert dist.origin.url.endswith('.whl') + assert dist.origin.archive_info.hashes.sha256 diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 09d50859970c99..4611f62b293ff9 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -3990,6 +3990,8 @@ def foo(a, *, b:1): pass foo_sig = MySignature.from_callable(foo) self.assertIsInstance(foo_sig, MySignature) + @unittest.skipIf(MISSING_C_DOCSTRINGS, + "Signature information for builtins requires docstrings") def test_signature_from_callable_class(self): # A regression test for a class inheriting its signature from `object`. class MySignature(inspect.Signature): pass @@ -4080,7 +4082,8 @@ def test_signature_eval_str(self): par('c', PORK, annotation="'MyClass'"), ))) - self.assertEqual(signature_func(isa.UnannotatedClass), sig()) + if not MISSING_C_DOCSTRINGS: + self.assertEqual(signature_func(isa.UnannotatedClass), sig()) self.assertEqual(signature_func(isa.unannotated_function), sig( parameters=( diff --git a/Lib/test/test_interpreters/__main__.py b/Lib/test/test_interpreters/__main__.py index 8641229877b2be..40a23a297ec2b4 100644 --- a/Lib/test/test_interpreters/__main__.py +++ b/Lib/test/test_interpreters/__main__.py @@ -1,4 +1,4 @@ from . 
import load_tests
 import unittest

-nittest.main()
+unittest.main()
diff --git a/Lib/test/test_json/test_recursion.py b/Lib/test/test_json/test_recursion.py
index 9919d7fbe54ef7..164ff2013eb552 100644
--- a/Lib/test/test_json/test_recursion.py
+++ b/Lib/test/test_json/test_recursion.py
@@ -85,10 +85,10 @@ def test_highly_nested_objects_encoding(self):
         for x in range(100000):
             l, d = [l], {'k':d}
         with self.assertRaises(RecursionError):
-            with support.infinite_recursion():
+            with support.infinite_recursion(5000):
                 self.dumps(l)
         with self.assertRaises(RecursionError):
-            with support.infinite_recursion():
+            with support.infinite_recursion(5000):
                 self.dumps(d)

     def test_endless_recursion(self):
@@ -99,7 +99,7 @@ def default(self, o):
                 return [o]

         with self.assertRaises(RecursionError):
-            with support.infinite_recursion():
+            with support.infinite_recursion(1000):
                 EndlessJSONEncoder(check_circular=False).encode(5j)

diff --git a/Lib/test/test_module/__init__.py b/Lib/test/test_module/__init__.py
index d49c44df4d839d..98d1cbe824df12 100644
--- a/Lib/test/test_module/__init__.py
+++ b/Lib/test/test_module/__init__.py
@@ -30,7 +30,7 @@ def test_uninitialized(self):
                 self.fail("__name__ = %s" % repr(s))
         except AttributeError:
             pass
-        self.assertEqual(foo.__doc__, ModuleType.__doc__)
+        self.assertEqual(foo.__doc__, ModuleType.__doc__ or '')

     def test_uninitialized_missing_getattr(self):
         # Issue 8297
diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py
index 00cfdd37e568a6..db5f3b2634be97 100644
--- a/Lib/test/test_pathlib/test_pathlib.py
+++ b/Lib/test/test_pathlib/test_pathlib.py
@@ -2,8 +2,10 @@
 import os
 import sys
 import errno
+import ntpath
 import pathlib
 import pickle
+import posixpath
 import socket
 import stat
 import tempfile
@@ -39,6 +41,50 @@ class PurePathTest(test_pathlib_abc.DummyPurePathTest):
     cls = pathlib.PurePath

+    # Make sure any symbolic links in the base test path are resolved.
+ base = os.path.realpath(TESTFN) + + def test_concrete_class(self): + if self.cls is pathlib.PurePath: + expected = pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath + else: + expected = self.cls + p = self.cls('a') + self.assertIs(type(p), expected) + + def test_concrete_pathmod(self): + if self.cls is pathlib.PurePosixPath: + expected = posixpath + elif self.cls is pathlib.PureWindowsPath: + expected = ntpath + else: + expected = os.path + p = self.cls('a') + self.assertIs(p.pathmod, expected) + + def test_different_pathmods_unequal(self): + p = self.cls('a') + if p.pathmod is posixpath: + q = pathlib.PureWindowsPath('a') + else: + q = pathlib.PurePosixPath('a') + self.assertNotEqual(p, q) + + def test_different_pathmods_unordered(self): + p = self.cls('a') + if p.pathmod is posixpath: + q = pathlib.PureWindowsPath('a') + else: + q = pathlib.PurePosixPath('a') + with self.assertRaises(TypeError): + p < q + with self.assertRaises(TypeError): + p <= q + with self.assertRaises(TypeError): + p > q + with self.assertRaises(TypeError): + p >= q + def test_constructor_nested(self): P = self.cls P(FakePath("a/b/c")) @@ -60,14 +106,28 @@ def test_div_nested(self): def test_pickling_common(self): P = self.cls - p = P('/a/b') - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - dumped = pickle.dumps(p, proto) - pp = pickle.loads(dumped) - self.assertIs(pp.__class__, p.__class__) - self.assertEqual(pp, p) - self.assertEqual(hash(pp), hash(p)) - self.assertEqual(str(pp), str(p)) + for pathstr in ('a', 'a/', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c', 'a/b/c/'): + with self.subTest(pathstr=pathstr): + p = P(pathstr) + for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): + dumped = pickle.dumps(p, proto) + pp = pickle.loads(dumped) + self.assertIs(pp.__class__, p.__class__) + self.assertEqual(pp, p) + self.assertEqual(hash(pp), hash(p)) + self.assertEqual(str(pp), str(p)) + + def test_repr_common(self): + for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): + with self.subTest(pathstr=pathstr): + p = self.cls(pathstr) + clsname = p.__class__.__name__ + r = repr(p) + # The repr() is in the form ClassName("forward-slashes path"). + self.assertTrue(r.startswith(clsname + '('), r) + self.assertTrue(r.endswith(')'), r) + inner = r[len(clsname) + 1 : -1] + self.assertEqual(eval(inner), p.as_posix()) def test_fspath_common(self): P = self.cls @@ -944,6 +1004,19 @@ def tempdir(self): self.addCleanup(os_helper.rmtree, d) return d + def test_matches_pathbase_api(self): + our_names = {name for name in dir(self.cls) if name[0] != '_'} + path_names = {name for name in dir(pathlib._abc.PathBase) if name[0] != '_'} + self.assertEqual(our_names, path_names) + for attr_name in our_names: + if attr_name == 'pathmod': + # On Windows, Path.pathmod is ntpath, but PathBase.pathmod is + # posixpath, and so their docstrings differ. 
+ continue + our_attr = getattr(self.cls, attr_name) + path_attr = getattr(pathlib._abc.PathBase, attr_name) + self.assertEqual(our_attr.__doc__, path_attr.__doc__) + def test_concrete_class(self): if self.cls is pathlib.Path: expected = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index a272973d9c1d61..568a3183b40b8d 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -2,20 +2,20 @@ import io import os import errno -import pathlib -import pickle -import posixpath import stat import unittest +from pathlib._abc import UnsupportedOperation, PurePathBase, PathBase +import posixpath + from test.support import set_recursion_limit from test.support.os_helper import TESTFN class UnsupportedOperationTest(unittest.TestCase): def test_is_notimplemented(self): - self.assertTrue(issubclass(pathlib.UnsupportedOperation, NotImplementedError)) - self.assertTrue(isinstance(pathlib.UnsupportedOperation(), NotImplementedError)) + self.assertTrue(issubclass(UnsupportedOperation, NotImplementedError)) + self.assertTrue(isinstance(UnsupportedOperation(), NotImplementedError)) # @@ -24,13 +24,14 @@ def test_is_notimplemented(self): class PurePathBaseTest(unittest.TestCase): - cls = pathlib._abc.PurePathBase + cls = PurePathBase def test_magic_methods(self): P = self.cls self.assertFalse(hasattr(P, '__fspath__')) self.assertFalse(hasattr(P, '__bytes__')) self.assertIs(P.__reduce__, object.__reduce__) + self.assertIs(P.__repr__, object.__repr__) self.assertIs(P.__hash__, object.__hash__) self.assertIs(P.__eq__, object.__eq__) self.assertIs(P.__lt__, object.__lt__) @@ -38,8 +39,11 @@ def test_magic_methods(self): self.assertIs(P.__gt__, object.__gt__) self.assertIs(P.__ge__, object.__ge__) + def test_pathmod(self): + self.assertIs(self.cls.pathmod, posixpath) -class DummyPurePath(pathlib._abc.PurePathBase): + +class DummyPurePath(PurePathBase): def __eq__(self, other): if not isinstance(other, DummyPurePath): return NotImplemented @@ -52,8 +56,8 @@ def __hash__(self): class DummyPurePathTest(unittest.TestCase): cls = DummyPurePath - # Make sure any symbolic links in the base test path are resolved. - base = os.path.realpath(TESTFN) + # Use a base path that's unrelated to any real filesystem path. + base = f'/this/path/kills/fascists/{TESTFN}' # Keys are canonical paths, values are list of tuples of arguments # supposed to produce equal paths. 
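[Aside: the dummy base path above works because pure paths are purely lexical. A minimal sketch with the public API; nothing here touches the filesystem, so the path need not exist.]

    from pathlib import PurePosixPath

    p = PurePosixPath('/this/path/does/not/exist/demo.txt')
    print(p.name)                 # 'demo.txt'
    print(p.suffix)               # '.txt'
    print(p.with_suffix('.md'))   # PurePosixPath('/this/path/does/not/exist/demo.md')
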
@@ -86,37 +90,6 @@ def test_constructor_common(self): P('a/b/c') P('/a/b/c') - def test_concrete_class(self): - if self.cls is pathlib.PurePath: - expected = pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath - else: - expected = self.cls - p = self.cls('a') - self.assertIs(type(p), expected) - - def test_different_pathmods_unequal(self): - p = self.cls('a') - if p.pathmod is posixpath: - q = pathlib.PureWindowsPath('a') - else: - q = pathlib.PurePosixPath('a') - self.assertNotEqual(p, q) - - def test_different_pathmods_unordered(self): - p = self.cls('a') - if p.pathmod is posixpath: - q = pathlib.PureWindowsPath('a') - else: - q = pathlib.PurePosixPath('a') - with self.assertRaises(TypeError): - p < q - with self.assertRaises(TypeError): - p <= q - with self.assertRaises(TypeError): - p > q - with self.assertRaises(TypeError): - p >= q - def _check_str_subclass(self, *args): # Issue #21127: it should be possible to construct a PurePath object # from a str subclass instance, and it then gets converted to @@ -227,18 +200,6 @@ def test_as_posix_common(self): self.assertEqual(P(pathstr).as_posix(), pathstr) # Other tests for as_posix() are in test_equivalences(). - def test_repr_common(self): - for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): - with self.subTest(pathstr=pathstr): - p = self.cls(pathstr) - clsname = p.__class__.__name__ - r = repr(p) - # The repr() is in the form ClassName("forward-slashes path"). - self.assertTrue(r.startswith(clsname + '('), r) - self.assertTrue(r.endswith(')'), r) - inner = r[len(clsname) + 1 : -1] - self.assertEqual(eval(inner), p.as_posix()) - def test_eq_common(self): P = self.cls self.assertEqual(P('a/b'), P('a/b')) @@ -677,12 +638,12 @@ def test_is_relative_to_common(self): # class PathBaseTest(PurePathBaseTest): - cls = pathlib._abc.PathBase + cls = PathBase def test_unsupported_operation(self): P = self.cls p = self.cls() - e = pathlib.UnsupportedOperation + e = UnsupportedOperation self.assertRaises(e, p.stat) self.assertRaises(e, p.lstat) self.assertRaises(e, p.exists) @@ -724,7 +685,7 @@ def test_unsupported_operation(self): self.assertRaises(e, p.as_uri) def test_as_uri_common(self): - e = pathlib.UnsupportedOperation + e = UnsupportedOperation self.assertRaises(e, self.cls().as_uri) def test_fspath_common(self): @@ -733,15 +694,6 @@ def test_fspath_common(self): def test_as_bytes_common(self): self.assertRaises(TypeError, bytes, self.cls()) - def test_matches_path_api(self): - our_names = {name for name in dir(self.cls) if name[0] != '_'} - path_names = {name for name in dir(pathlib.Path) if name[0] != '_'} - self.assertEqual(our_names, path_names) - for attr_name in our_names: - our_attr = getattr(self.cls, attr_name) - path_attr = getattr(pathlib.Path, attr_name) - self.assertEqual(our_attr.__doc__, path_attr.__doc__) - class DummyPathIO(io.BytesIO): """ @@ -758,7 +710,7 @@ def close(self): super().close() -class DummyPath(pathlib._abc.PathBase): +class DummyPath(PathBase): """ Simple implementation of PathBase that keeps files and directories in memory. 
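[Aside: what the surrounding tests pin down about the base class, sketched against pathlib._abc as introduced by this patch; the module is private and may change. Every I/O primitive on a bare PathBase raises UnsupportedOperation, which is deliberately a subclass of NotImplementedError.]

    from pathlib._abc import PathBase, UnsupportedOperation

    p = PathBase()
    try:
        p.stat()
    except UnsupportedOperation:
        print("stat() stays unsupported until a subclass provides I/O")
    assert issubclass(UnsupportedOperation, NotImplementedError)
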
@@ -917,11 +869,13 @@ def assertFileNotFound(self, func, *args, **kwargs): self.assertEqual(cm.exception.errno, errno.ENOENT) def assertEqualNormCase(self, path_a, path_b): - self.assertEqual(os.path.normcase(path_a), os.path.normcase(path_b)) + normcase = self.pathmod.normcase + self.assertEqual(normcase(path_a), normcase(path_b)) def test_samefile(self): - fileA_path = os.path.join(self.base, 'fileA') - fileB_path = os.path.join(self.base, 'dirB', 'fileB') + pathmod = self.pathmod + fileA_path = pathmod.join(self.base, 'fileA') + fileB_path = pathmod.join(self.base, 'dirB', 'fileB') p = self.cls(fileA_path) pp = self.cls(fileA_path) q = self.cls(fileB_path) @@ -930,7 +884,7 @@ def test_samefile(self): self.assertFalse(p.samefile(fileB_path)) self.assertFalse(p.samefile(q)) # Test the non-existent file case - non_existent = os.path.join(self.base, 'foo') + non_existent = pathmod.join(self.base, 'foo') r = self.cls(non_existent) self.assertRaises(FileNotFoundError, p.samefile, r) self.assertRaises(FileNotFoundError, p.samefile, non_existent) @@ -1372,7 +1326,7 @@ def test_readlink(self): def test_readlink_unsupported(self): P = self.cls(self.base) p = P / 'fileA' - with self.assertRaises(pathlib.UnsupportedOperation): + with self.assertRaises(UnsupportedOperation): q.readlink(p) def _check_resolve(self, p, expected, strict=True): @@ -1391,14 +1345,15 @@ def test_resolve_common(self): p.resolve(strict=True) self.assertEqual(cm.exception.errno, errno.ENOENT) # Non-strict + pathmod = self.pathmod self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.join(self.base, 'foo')) + pathmod.join(self.base, 'foo')) p = P(self.base, 'foo', 'in', 'spam') self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.join(self.base, 'foo', 'in', 'spam')) + pathmod.join(self.base, 'foo', 'in', 'spam')) p = P(self.base, '..', 'foo', 'in', 'spam') self.assertEqualNormCase(str(p.resolve(strict=False)), - os.path.abspath(os.path.join('foo', 'in', 'spam'))) + pathmod.join(pathmod.dirname(self.base), 'foo', 'in', 'spam')) # These are all relative symlinks. p = P(self.base, 'dirB', 'fileB') self._check_resolve_relative(p, p) @@ -1413,7 +1368,7 @@ def test_resolve_common(self): self._check_resolve_relative(p, P(self.base, 'dirB', 'fileB', 'foo', 'in', 'spam'), False) p = P(self.base, 'dirA', 'linkC', '..', 'foo', 'in', 'spam') - if os.name == 'nt' and isinstance(p, pathlib.Path): + if self.cls.pathmod is not posixpath: # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. self._check_resolve_relative(p, P(self.base, 'dirA', 'foo', 'in', @@ -1433,7 +1388,7 @@ def test_resolve_common(self): self._check_resolve_relative(p, P(self.base, 'dirB', 'foo', 'in', 'spam'), False) p = P(self.base, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam') - if os.name == 'nt' and isinstance(p, pathlib.Path): + if self.cls.pathmod is not posixpath: # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. 
self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False) @@ -1446,10 +1401,11 @@ def test_resolve_dot(self): # See http://web.archive.org/web/20200623062557/https://bitbucket.org/pitrou/pathlib/issues/9/ if not self.can_symlink: self.skipTest("symlinks required") + pathmod = self.pathmod p = self.cls(self.base) p.joinpath('0').symlink_to('.', target_is_directory=True) - p.joinpath('1').symlink_to(os.path.join('0', '0'), target_is_directory=True) - p.joinpath('2').symlink_to(os.path.join('1', '1'), target_is_directory=True) + p.joinpath('1').symlink_to(pathmod.join('0', '0'), target_is_directory=True) + p.joinpath('2').symlink_to(pathmod.join('1', '1'), target_is_directory=True) q = p / '2' self.assertEqual(q.resolve(strict=True), p) r = q / '3' / '4' @@ -1466,7 +1422,7 @@ def _check_symlink_loop(self, *args): def test_resolve_loop(self): if not self.can_symlink: self.skipTest("symlinks required") - if os.name == 'nt' and issubclass(self.cls, pathlib.Path): + if self.cls.pathmod is not posixpath: self.skipTest("symlink loops work differently with concrete Windows paths") # Loops with relative symlinks. self.cls(self.base, 'linkX').symlink_to('linkX/inside') @@ -1655,13 +1611,6 @@ def test_is_char_device_false(self): self.assertIs((P / 'fileA\udfff').is_char_device(), False) self.assertIs((P / 'fileA\x00').is_char_device(), False) - def test_pickling_common(self): - p = self.cls(self.base, 'fileA') - for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): - dumped = pickle.dumps(p, proto) - pp = pickle.loads(dumped) - self.assertEqual(pp.stat(), p.stat()) - def test_parts_interning(self): P = self.cls p = P('/usr/bin/foo') @@ -1676,10 +1625,11 @@ def _check_complex_symlinks(self, link0_target): self.skipTest("symlinks required") # Test solving a non-looping chain of symlinks (issue #19887). + pathmod = self.pathmod P = self.cls(self.base) - P.joinpath('link1').symlink_to(os.path.join('link0', 'link0'), target_is_directory=True) - P.joinpath('link2').symlink_to(os.path.join('link1', 'link1'), target_is_directory=True) - P.joinpath('link3').symlink_to(os.path.join('link2', 'link2'), target_is_directory=True) + P.joinpath('link1').symlink_to(pathmod.join('link0', 'link0'), target_is_directory=True) + P.joinpath('link2').symlink_to(pathmod.join('link1', 'link1'), target_is_directory=True) + P.joinpath('link3').symlink_to(pathmod.join('link2', 'link2'), target_is_directory=True) P.joinpath('link0').symlink_to(link0_target, target_is_directory=True) # Resolve absolute paths. @@ -1699,7 +1649,7 @@ def _check_complex_symlinks(self, link0_target): # Resolve relative paths. 
try: self.cls().absolute() - except pathlib.UnsupportedOperation: + except UnsupportedOperation: return old_path = os.getcwd() os.chdir(self.base) @@ -1726,7 +1676,7 @@ def test_complex_symlinks_relative(self): self._check_complex_symlinks('.') def test_complex_symlinks_relative_dot_dot(self): - self._check_complex_symlinks(os.path.join('dirA', '..')) + self._check_complex_symlinks(self.pathmod.join('dirA', '..')) def setUpWalk(self): # Build: diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 55cc5e4c6e4f03..9c382ace806e0f 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -936,6 +936,7 @@ def test_utime(self): posix.utime(os_helper.TESTFN, (now, now)) def check_chmod(self, chmod_func, target, **kwargs): + closefd = not isinstance(target, int) mode = os.stat(target).st_mode try: new_mode = mode & ~(stat.S_IWOTH | stat.S_IWGRP | stat.S_IWUSR) @@ -943,7 +944,7 @@ def check_chmod(self, chmod_func, target, **kwargs): self.assertEqual(os.stat(target).st_mode, new_mode) if stat.S_ISREG(mode): try: - with open(target, 'wb+'): + with open(target, 'wb+', closefd=closefd): pass except PermissionError: pass @@ -951,10 +952,10 @@ def check_chmod(self, chmod_func, target, **kwargs): chmod_func(target, new_mode, **kwargs) self.assertEqual(os.stat(target).st_mode, new_mode) if stat.S_ISREG(mode): - with open(target, 'wb+'): + with open(target, 'wb+', closefd=closefd): pass finally: - posix.chmod(target, mode) + chmod_func(target, mode) @os_helper.skip_unless_working_chmod def test_chmod_file(self): @@ -971,6 +972,12 @@ def test_chmod_dir(self): target = self.tempdir() self.check_chmod(posix.chmod, target) + @os_helper.skip_unless_working_chmod + def test_fchmod_file(self): + with open(os_helper.TESTFN, 'wb+') as f: + self.check_chmod(posix.fchmod, f.fileno()) + self.check_chmod(posix.chmod, f.fileno()) + @unittest.skipUnless(hasattr(posix, 'lchmod'), 'test needs os.lchmod()') def test_lchmod_file(self): self.check_chmod(posix.lchmod, os_helper.TESTFN) diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index eb50510e12b7b6..982ee60c0be4f7 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -32,7 +32,7 @@ from test.support import threading_helper from test.support import (reap_children, captured_output, captured_stdout, captured_stderr, is_emscripten, is_wasi, - requires_docstrings) + requires_docstrings, MISSING_C_DOCSTRINGS) from test.support.os_helper import (TESTFN, rmtree, unlink) from test import pydoc_mod @@ -906,12 +906,13 @@ class A(builtins.object) | ---------------------------------------------------------------------- | Data descriptors defined here: | - | __dict__ - | dictionary for instance variables + | __dict__%s | - | __weakref__ - | list of weak references to the object -''' % __name__) + | __weakref__%s +''' % (__name__, + '' if MISSING_C_DOCSTRINGS else '\n | dictionary for instance variables', + '' if MISSING_C_DOCSTRINGS else '\n | list of weak references to the object', + )) def func( arg1: Callable[[Annotated[int, 'Some doc']], str], @@ -1154,13 +1155,15 @@ def test_generic_alias(self): doc = pydoc.render_doc(typing.List[int], renderer=pydoc.plaintext) self.assertIn('_GenericAlias in module typing', doc) self.assertIn('List = class list(object)', doc) - self.assertIn(list.__doc__.strip().splitlines()[0], doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(list.__doc__.strip().splitlines()[0], doc) self.assertEqual(pydoc.describe(list[int]), 'GenericAlias') doc = pydoc.render_doc(list[int], renderer=pydoc.plaintext) 
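# Aside (illustrative, not part of the patch): the pattern behind the
# MISSING_C_DOCSTRINGS guards added throughout this file. On builds
# configured --without-doc-strings, C builtins have no __doc__, so any
# assertion about their docstrings must be skipped. Hypothetical example:
from test.support import MISSING_C_DOCSTRINGS
import unittest

class DocstringDependentExample(unittest.TestCase):
    @unittest.skipIf(MISSING_C_DOCSTRINGS,
                     "Signature information for builtins requires docstrings")
    def test_list_docstring(self):
        self.assertIn('list', list.__doc__)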
self.assertIn('GenericAlias in module builtins', doc) self.assertIn('\nclass list(object)', doc) - self.assertIn(list.__doc__.strip().splitlines()[0], doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(list.__doc__.strip().splitlines()[0], doc) def test_union_type(self): self.assertEqual(pydoc.describe(typing.Union[int, str]), '_UnionGenericAlias') @@ -1174,7 +1177,8 @@ def test_union_type(self): doc = pydoc.render_doc(int | str, renderer=pydoc.plaintext) self.assertIn('UnionType in module types object', doc) self.assertIn('\nclass UnionType(builtins.object)', doc) - self.assertIn(types.UnionType.__doc__.strip().splitlines()[0], doc) + if not MISSING_C_DOCSTRINGS: + self.assertIn(types.UnionType.__doc__.strip().splitlines()[0], doc) def test_special_form(self): self.assertEqual(pydoc.describe(typing.NoReturn), '_SpecialForm') @@ -1327,6 +1331,7 @@ def test_bound_builtin_classmethod_o(self): "__class_getitem__(object, /) method of builtins.type instance") @support.cpython_only + @requires_docstrings def test_module_level_callable_unrepresentable_default(self): import _testcapi builtin = _testcapi.func_with_unrepresentable_signature @@ -1334,6 +1339,7 @@ def test_module_level_callable_unrepresentable_default(self): "func_with_unrepresentable_signature(a, b=)") @support.cpython_only + @requires_docstrings def test_builtin_staticmethod_unrepresentable_default(self): self.assertEqual(self._get_summary_line(str.maketrans), "maketrans(x, y=, z=, /)") @@ -1343,6 +1349,7 @@ def test_builtin_staticmethod_unrepresentable_default(self): "staticmeth(a, b=)") @support.cpython_only + @requires_docstrings def test_unbound_builtin_method_unrepresentable_default(self): self.assertEqual(self._get_summary_line(dict.pop), "pop(self, key, default=, /)") @@ -1352,6 +1359,7 @@ def test_unbound_builtin_method_unrepresentable_default(self): "meth(self, /, a, b=)") @support.cpython_only + @requires_docstrings def test_bound_builtin_method_unrepresentable_default(self): self.assertEqual(self._get_summary_line({}.pop), "pop(key, default=, /) " @@ -1363,6 +1371,7 @@ def test_bound_builtin_method_unrepresentable_default(self): "method of _testcapi.DocStringUnrepresentableSignatureTest instance") @support.cpython_only + @requires_docstrings def test_unbound_builtin_classmethod_unrepresentable_default(self): import _testcapi cls = _testcapi.DocStringUnrepresentableSignatureTest @@ -1371,6 +1380,7 @@ def test_unbound_builtin_classmethod_unrepresentable_default(self): "classmeth(type, /, a, b=)") @support.cpython_only + @requires_docstrings def test_bound_builtin_classmethod_unrepresentable_default(self): import _testcapi cls = _testcapi.DocStringUnrepresentableSignatureTest diff --git a/Lib/test/test_rlcompleter.py b/Lib/test/test_rlcompleter.py index 7347fca71be2fe..273ce2cf5c7dd2 100644 --- a/Lib/test/test_rlcompleter.py +++ b/Lib/test/test_rlcompleter.py @@ -2,6 +2,7 @@ from unittest.mock import patch import builtins import rlcompleter +from test.support import MISSING_C_DOCSTRINGS class CompleteMe: """ Trivial class used in testing rlcompleter.Completer. 
""" @@ -40,12 +41,12 @@ def test_global_matches(self): # test with a customized namespace self.assertEqual(self.completer.global_matches('CompleteM'), - ['CompleteMe()']) + ['CompleteMe(' if MISSING_C_DOCSTRINGS else 'CompleteMe()']) self.assertEqual(self.completer.global_matches('eg'), ['egg(']) # XXX: see issue5256 self.assertEqual(self.completer.global_matches('CompleteM'), - ['CompleteMe()']) + ['CompleteMe(' if MISSING_C_DOCSTRINGS else 'CompleteMe()']) def test_attr_matches(self): # test with builtins namespace diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 5ce8e5d77fbbf3..cc5459aa08fe33 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1101,19 +1101,18 @@ def test_copymode_follow_symlinks(self): shutil.copymode(src, dst) self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) # On Windows, os.chmod does not follow symlinks (issue #15411) - if os.name != 'nt': - # follow src link - os.chmod(dst, stat.S_IRWXO) - shutil.copymode(src_link, dst) - self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) - # follow dst link - os.chmod(dst, stat.S_IRWXO) - shutil.copymode(src, dst_link) - self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) - # follow both links - os.chmod(dst, stat.S_IRWXO) - shutil.copymode(src_link, dst_link) - self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) + # follow src link + os.chmod(dst, stat.S_IRWXO) + shutil.copymode(src_link, dst) + self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) + # follow dst link + os.chmod(dst, stat.S_IRWXO) + shutil.copymode(src, dst_link) + self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) + # follow both links + os.chmod(dst, stat.S_IRWXO) + shutil.copymode(src_link, dst_link) + self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) @unittest.skipUnless(hasattr(os, 'lchmod'), 'requires os.lchmod') @os_helper.skip_unless_symlink diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index acb7e9d4c6074d..637a0ca3b36972 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -1,5 +1,6 @@ import enum import errno +import functools import inspect import os import random @@ -76,6 +77,9 @@ class PosixTests(unittest.TestCase): def trivial_signal_handler(self, *args): pass + def create_handler_with_partial(self, argument): + return functools.partial(self.trivial_signal_handler, argument) + def test_out_of_range_signal_number_raises_error(self): self.assertRaises(ValueError, signal.getsignal, 4242) @@ -96,6 +100,28 @@ def test_getsignal(self): signal.signal(signal.SIGHUP, hup) self.assertEqual(signal.getsignal(signal.SIGHUP), hup) + def test_no_repr_is_called_on_signal_handler(self): + # See https://github.com/python/cpython/issues/112559. 
+ + class MyArgument: + def __init__(self): + self.repr_count = 0 + + def __repr__(self): + self.repr_count += 1 + return super().__repr__() + + argument = MyArgument() + self.assertEqual(0, argument.repr_count) + + handler = self.create_handler_with_partial(argument) + hup = signal.signal(signal.SIGHUP, handler) + self.assertIsInstance(hup, signal.Handlers) + self.assertEqual(signal.getsignal(signal.SIGHUP), handler) + signal.signal(signal.SIGHUP, hup) + self.assertEqual(signal.getsignal(signal.SIGHUP), hup) + self.assertEqual(0, argument.repr_count) + def test_strsignal(self): self.assertIn("Interrupt", signal.strsignal(signal.SIGINT)) self.assertIn("Terminated", signal.strsignal(signal.SIGTERM)) diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 43162c540b55ae..f8e968b9b56f82 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -396,6 +396,33 @@ def method(self): with self.assertRaisesRegex(TypeError, "argument 1 must be a type"): C().method() + def test_supercheck_fail(self): + class C: + def method(self, type_, obj): + return super(type_, obj).method() + + c = C() + err_msg = ( + r"super\(type, obj\): obj \({} {}\) is not " + r"an instance or subtype of type \({}\)." + ) + + cases = ( + (int, c, int.__name__, C.__name__, "instance of"), + # obj is instance of type + (C, list(), C.__name__, list.__name__, "instance of"), + # obj is type itself + (C, list, C.__name__, list.__name__, "type"), + ) + + for case in cases: + with self.subTest(case=case): + type_, obj, type_str, obj_str, instance_or_type = case + regex = err_msg.format(instance_or_type, obj_str, type_str) + + with self.assertRaisesRegex(TypeError, regex): + c.method(type_, obj) + def test_super___class__(self): class C: def method(self): diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index c34b0e5e015702..d160cbf0645b47 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -630,7 +630,7 @@ def recursive_function(depth): if depth: recursive_function(depth - 1) - for max_depth in (5, 25, 250): + for max_depth in (5, 25, 250, 2500): with support.infinite_recursion(max_depth): available = support.get_recursion_available() diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py index 987e9e32afc325..92b78a8086a83d 100644 --- a/Lib/test/test_symtable.py +++ b/Lib/test/test_symtable.py @@ -337,7 +337,6 @@ def test_stdin(self): symtable.main(['-']) self.assertEqual(stdout.getvalue(), out) lines = out.splitlines() - print(out) self.assertIn("symbol table for module from file '':", lines) diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index ece1366076798c..8b3ca69c9fe155 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -2017,6 +2017,7 @@ def f(x: *b) import re import doctest +import textwrap import unittest from test import support @@ -2279,6 +2280,31 @@ def test_nested_named_except_blocks(self): code += f"{' '*4*12}pass" self._check_error(code, "too many statically nested blocks") + @support.cpython_only + def test_with_statement_many_context_managers(self): + # See gh-113297 + + def get_code(n): + code = textwrap.dedent(""" + def bug(): + with ( + a + """) + for i in range(n): + code += f" as a{i}, a\n" + code += "): yield a" + return code + + CO_MAXBLOCKS = 20 # static nesting limit of the compiler + + for n in range(CO_MAXBLOCKS): + with self.subTest(f"within range: {n=}"): + compile(get_code(n), "", "exec") + + for n in range(CO_MAXBLOCKS, CO_MAXBLOCKS + 5): + with self.subTest(f"out of range: {n=}"): + 
self._check_error(get_code(n), "too many statically nested blocks") + def test_barry_as_flufl_with_syntax_errors(self): # The "barry_as_flufl" rule can produce some "bugs-at-a-distance" if # is reading the wrong token in the presence of syntax errors later diff --git a/Lib/test/test_tools/test_freeze.py b/Lib/test/test_tools/test_freeze.py index 671ec2961e7f8f..0e7ed67de71067 100644 --- a/Lib/test/test_tools/test_freeze.py +++ b/Lib/test/test_tools/test_freeze.py @@ -14,6 +14,8 @@ @support.requires_zlib() @unittest.skipIf(sys.platform.startswith('win'), 'not supported on Windows') +@unittest.skipIf(sys.platform == 'darwin' and sys._framework, + 'not supported for frameworks builds on macOS') @support.skip_if_buildbot('not all buildbots have enough space') # gh-103053: Skip test if Python is built with Profile Guided Optimization # (PGO), since the test is just too slow in this case. diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index da32c4ea6477ce..bfecd8eb71220c 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -1,6 +1,6 @@ # Python test set -- part 6, built-in types -from test.support import run_with_locale, cpython_only +from test.support import run_with_locale, cpython_only, MISSING_C_DOCSTRINGS import collections.abc from collections import namedtuple import copy @@ -598,6 +598,8 @@ def test_slot_wrapper_types(self): self.assertIsInstance(object.__lt__, types.WrapperDescriptorType) self.assertIsInstance(int.__lt__, types.WrapperDescriptorType) + @unittest.skipIf(MISSING_C_DOCSTRINGS, + "Signature information for builtins requires docstrings") def test_dunder_get_signature(self): sig = inspect.signature(object.__init__.__get__) self.assertEqual(list(sig.parameters), ["instance", "owner"]) diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index 2d695bc883131f..ca481c57c3d972 100644 --- a/Lib/test/test_webbrowser.py +++ b/Lib/test/test_webbrowser.py @@ -272,6 +272,17 @@ def test_register_preferred(self): self._check_registration(preferred=True) + @unittest.skipUnless(sys.platform == "darwin", "macOS specific test") + def test_no_xdg_settings_on_macOS(self): + # On macOS webbrowser should not use xdg-settings to + # look for X11 based browsers (for those users with + # XQuartz installed) + with mock.patch("subprocess.check_output") as ck_o: + webbrowser.register_standard_browsers() + + ck_o.assert_not_called() + + class ImportTest(unittest.TestCase): def test_register(self): webbrowser = import_helper.import_fresh_module('webbrowser') diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index b9e7937b0bbc00..80ee064896f59a 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2535,7 +2535,7 @@ def __eq__(self, o): e.extend([ET.Element('bar')]) self.assertRaises(ValueError, e.remove, X('baz')) - @support.infinite_recursion(25) + @support.infinite_recursion() def test_recursive_repr(self): # Issue #25455 e = ET.Element('foo') diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index 3766ceac8385f2..7b6b69d0109d88 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -17,6 +17,7 @@ from datetime import date, datetime, time, timedelta, timezone from functools import cached_property +from test.support import MISSING_C_DOCSTRINGS from test.test_zoneinfo import _support as test_support from test.test_zoneinfo._support import OS_ENV_LOCK, TZPATH_TEST_LOCK, ZoneInfoTestBase from test.support.import_helper import 
import_module @@ -404,6 +405,8 @@ def test_time_fixed_offset(self): class CZoneInfoTest(ZoneInfoTest): module = c_zoneinfo + @unittest.skipIf(MISSING_C_DOCSTRINGS, + "Signature information for builtins requires docstrings") def test_signatures(self): """Ensure that C module has valid method signatures.""" import inspect diff --git a/Lib/typing.py b/Lib/typing.py index 61b88a560e9dc5..d7d793539b35b1 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -3301,7 +3301,7 @@ def __enter__(self) -> 'TextIO': def reveal_type[T](obj: T, /) -> T: - """Reveal the inferred type of a variable. + """Ask a static type checker to reveal the inferred type of an expression. When a static type checker encounters a call to ``reveal_type()``, it will emit the inferred type of the argument:: @@ -3313,7 +3313,7 @@ def reveal_type[T](obj: T, /) -> T: will produce output similar to 'Revealed type is "builtins.int"'. At runtime, the function prints the runtime type of the - argument and returns it unchanged. + argument and returns the argument unchanged. """ print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) return obj diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index c6b46eea657a21..2adb3d70662b1a 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -2229,8 +2229,11 @@ def __get__(self, obj, _type=None): return self.create_mock() -_CODE_ATTRS = dir(CodeType) -_CODE_SIG = inspect.signature(partial(CodeType.__init__, None)) +try: + _CODE_SIG = inspect.signature(partial(CodeType.__init__, None)) + _CODE_ATTRS = dir(CodeType) +except ValueError: + _CODE_SIG = None class AsyncMockMixin(Base): @@ -2250,9 +2253,12 @@ def __init__(self, /, *args, **kwargs): self.__dict__['_mock_await_count'] = 0 self.__dict__['_mock_await_args'] = None self.__dict__['_mock_await_args_list'] = _CallList() - code_mock = NonCallableMock(spec_set=_CODE_ATTRS) - code_mock.__dict__["_spec_class"] = CodeType - code_mock.__dict__["_spec_signature"] = _CODE_SIG + if _CODE_SIG: + code_mock = NonCallableMock(spec_set=_CODE_ATTRS) + code_mock.__dict__["_spec_class"] = CodeType + code_mock.__dict__["_spec_signature"] = _CODE_SIG + else: + code_mock = NonCallableMock(spec_set=CodeType) code_mock.co_flags = ( inspect.CO_COROUTINE + inspect.CO_VARARGS diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 8b0628745c57fc..6f9c6a6de177e6 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -495,7 +495,12 @@ def register_standard_browsers(): register("microsoft-edge", None, Edge("MicrosoftEdge.exe")) else: # Prefer X browsers if present - if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"): + # + # NOTE: Do not check for an X11 browser on macOS; an + # XQuartz installation sets a DISPLAY environment variable and will + # autostart when someone tries to access the display. Mac users in + # general don't need an X11 browser. 
+ if sys.platform != "darwin" and (os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY")): try: cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) diff --git a/Makefile.pre.in b/Makefile.pre.in index 195fc0ddddecd3..6a64547e97d266 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -657,11 +657,13 @@ check-clean-src: @if test -n "$(VPATH)" -a \( \ -f "$(srcdir)/$(BUILDPYTHON)" \ -o -f "$(srcdir)/Programs/python.o" \ - -o -f "$(srcdir)\Python/frozen_modules/importlib._bootstrap.h" \ + -o -f "$(srcdir)/Python/frozen_modules/importlib._bootstrap.h" \ \); then \ echo "Error: The source directory ($(srcdir)) is not clean" ; \ echo "Building Python out of the source tree (in $(abs_builddir)) requires a clean source tree ($(abs_srcdir))" ; \ - echo "Try to run: make -C \"$(srcdir)\" clean" ; \ + echo "Build artifacts such as .o files, executables, and Python/frozen_modules/*.h must not exist within $(srcdir)." ; \ + echo "Try to run:" ; \ + echo " (cd \"$(srcdir)\" && make clean || git clean -fdx -e Doc/venv)" ; \ exit 1; \ fi @@ -1562,7 +1564,7 @@ Objects/dictobject.o: $(srcdir)/Objects/stringlib/eq.h Objects/setobject.o: $(srcdir)/Objects/stringlib/eq.h Objects/obmalloc.o: $(srcdir)/Objects/mimalloc/alloc.c \ - $(srcdir)/Objects/mimalloc/alloc-aligned.c \ + $(srcdir)/Objects/mimalloc/alloc-aligned.c \ $(srcdir)/Objects/mimalloc/alloc-posix.c \ $(srcdir)/Objects/mimalloc/arena.c \ $(srcdir)/Objects/mimalloc/bitmap.c \ @@ -1575,7 +1577,10 @@ Objects/obmalloc.o: $(srcdir)/Objects/mimalloc/alloc.c \ $(srcdir)/Objects/mimalloc/segment.c \ $(srcdir)/Objects/mimalloc/segment-map.c \ $(srcdir)/Objects/mimalloc/stats.c \ - $(srcdir)/Objects/mimalloc/prim/prim.c + $(srcdir)/Objects/mimalloc/prim/prim.c \ + $(srcdir)/Objects/mimalloc/prim/osx/prim.c \ + $(srcdir)/Objects/mimalloc/prim/unix/prim.c \ + $(srcdir)/Objects/mimalloc/prim/wasi/prim.c Objects/mimalloc/page.o: $(srcdir)/Objects/mimalloc/page-queue.c @@ -1583,30 +1588,29 @@ Objects/mimalloc/page.o: $(srcdir)/Objects/mimalloc/page-queue.c regen-cases: # Regenerate various files from Python/bytecodes.c # Pass CASESFLAG=-l to insert #line directives in the output - PYTHONPATH=$(srcdir)/Tools/cases_generator \ - $(PYTHON_FOR_REGEN) \ - $(srcdir)/Tools/cases_generator/generate_cases.py \ - $(CASESFLAG) \ - -t $(srcdir)/Python/opcode_targets.h.new \ - -m $(srcdir)/Include/internal/pycore_opcode_metadata.h.new \ - -p $(srcdir)/Lib/_opcode_metadata.py.new \ - -a $(srcdir)/Python/abstract_interp_cases.c.h.new \ - $(srcdir)/Python/bytecodes.c - $(PYTHON_FOR_REGEN) \ - $(srcdir)/Tools/cases_generator/opcode_id_generator.py -o $(srcdir)/Include/opcode_ids.h.new $(srcdir)/Python/bytecodes.c - $(PYTHON_FOR_REGEN) \ - $(srcdir)/Tools/cases_generator/uop_id_generator.py -o $(srcdir)/Include/internal/pycore_uop_ids.h.new $(srcdir)/Python/bytecodes.c - $(PYTHON_FOR_REGEN) \ - $(srcdir)/Tools/cases_generator/tier1_generator.py -o $(srcdir)/Python/generated_cases.c.h.new $(srcdir)/Python/bytecodes.c - $(PYTHON_FOR_REGEN) \ - $(srcdir)/Tools/cases_generator/tier2_generator.py -o $(srcdir)/Python/executor_cases.c.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/opcode_id_generator.py \ + -o $(srcdir)/Include/opcode_ids.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/target_generator.py \ + -o $(srcdir)/Python/opcode_targets.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) 
$(srcdir)/Tools/cases_generator/uop_id_generator.py \ + -o $(srcdir)/Include/internal/pycore_uop_ids.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/py_metadata_generator.py \ + -o $(srcdir)/Lib/_opcode_metadata.py.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/tier1_generator.py \ + -o $(srcdir)/Python/generated_cases.c.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/tier2_generator.py \ + -o $(srcdir)/Python/executor_cases.c.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/opcode_metadata_generator.py \ + -o $(srcdir)/Include/internal/pycore_opcode_metadata.h.new $(srcdir)/Python/bytecodes.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/uop_metadata_generator.py -o \ + $(srcdir)/Include/internal/pycore_uop_metadata.h.new $(srcdir)/Python/bytecodes.c $(UPDATE_FILE) $(srcdir)/Python/generated_cases.c.h $(srcdir)/Python/generated_cases.c.h.new $(UPDATE_FILE) $(srcdir)/Include/opcode_ids.h $(srcdir)/Include/opcode_ids.h.new $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_uop_ids.h $(srcdir)/Include/internal/pycore_uop_ids.h.new $(UPDATE_FILE) $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/opcode_targets.h.new $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_opcode_metadata.h $(srcdir)/Include/internal/pycore_opcode_metadata.h.new + $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_uop_metadata.h $(srcdir)/Include/internal/pycore_uop_metadata.h.new $(UPDATE_FILE) $(srcdir)/Python/executor_cases.c.h $(srcdir)/Python/executor_cases.c.h.new - $(UPDATE_FILE) $(srcdir)/Python/abstract_interp_cases.c.h $(srcdir)/Python/abstract_interp_cases.c.h.new $(UPDATE_FILE) $(srcdir)/Lib/_opcode_metadata.py $(srcdir)/Lib/_opcode_metadata.py.new Python/compile.o: $(srcdir)/Include/internal/pycore_opcode_metadata.h @@ -1791,7 +1795,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/warnings.h \ $(srcdir)/Include/cpython/weakrefobject.h \ \ - @MIMALLOC_HEADERS@ \ + $(MIMALLOC_HEADERS) \ \ $(srcdir)/Include/internal/pycore_abstract.h \ $(srcdir)/Include/internal/pycore_asdl.h \ @@ -1888,6 +1892,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_unicodeobject.h \ $(srcdir)/Include/internal/pycore_unicodeobject_generated.h \ $(srcdir)/Include/internal/pycore_uops.h \ + $(srcdir)/Include/internal/pycore_uop_metadata.h \ $(srcdir)/Include/internal/pycore_warnings.h \ $(srcdir)/Include/internal/pycore_weakref.h \ $(DTRACE_HEADERS) \ diff --git a/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst b/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst new file mode 100644 index 00000000000000..2df3207f4e6f6c --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst @@ -0,0 +1,3 @@ +Fixed the ``check-clean-src`` step performed on out of tree builds to detect +errant ``$(srcdir)/Python/frozen_modules/*.h`` files and recommend +appropriate source tree cleanup steps to get a working build again. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst new file mode 100644 index 00000000000000..0884bc4a4be726 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst @@ -0,0 +1 @@ +Provide a better error message when accessing invalid attributes on partially initialized modules. 
The origin of the module being accessed is now included in the message to help with the common issue of shadowing other modules. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst new file mode 100644 index 00000000000000..01ca1cc7f79b8f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst @@ -0,0 +1,3 @@ +Increase the C recursion limit by a factor of 3 for non-debug builds, except +for WebAssembly and s390 platforms, which are unchanged. This mitigates some +regressions in 3.12 with deep recursion that mixes builtin (C) and Python code. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst new file mode 100644 index 00000000000000..fbb517173451f8 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst @@ -0,0 +1,2 @@ +Only use ``NULL`` in the exception stack to indicate an exception was +handled. Patch by Carey Metcalfe. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst new file mode 100644 index 00000000000000..6edbc9c60d968c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst @@ -0,0 +1 @@ +Improve :py:class:`super` error messages. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst new file mode 100644 index 00000000000000..b6aee1f241fd23 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst @@ -0,0 +1 @@ +Fix a segfault in the compiler on a ``with`` statement with 19 context managers. diff --git a/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst b/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst new file mode 100644 index 00000000000000..e88eac169177a9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst @@ -0,0 +1,2 @@ +Fix :mod:`asyncio` ``SubprocessTransport.close()`` so that it does not raise +``PermissionError`` when used with setuid executables. diff --git a/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst b/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst new file mode 100644 index 00000000000000..c08cb7c3ba5ea5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst @@ -0,0 +1,3 @@ +:func:`signal.signal` and :func:`signal.getsignal` no longer call ``repr`` on +callable handlers. :func:`asyncio.run` and :meth:`asyncio.Runner.run` no longer +call ``repr`` on the task results. Patch by Yilei Yang. 
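To illustrate the gh-112559 entry just above, here is a minimal sketch of the behaviour the new test locks in (a hypothetical, standalone, POSIX-only script, not part of this patch; the ``Argument`` class and the handler are purely illustrative)::

    import functools
    import signal

    class Argument:
        # Counts how often repr() is called on this object.
        def __init__(self):
            self.repr_count = 0

        def __repr__(self):
            self.repr_count += 1
            return super().__repr__()

    arg = Argument()
    # repr(handler) would also call repr(arg), because functools.partial
    # embeds the repr of its bound arguments in its own repr.
    handler = functools.partial(lambda a, signum, frame: None, arg)

    old = signal.signal(signal.SIGHUP, handler)  # must not call repr(handler)
    assert signal.getsignal(signal.SIGHUP) is handler
    signal.signal(signal.SIGHUP, old)            # restore the previous handler
    assert arg.repr_count == 0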
diff --git a/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst b/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst new file mode 100644 index 00000000000000..1b43803d1a7aa4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst @@ -0,0 +1,5 @@ +Sync with importlib_metadata 7.0, including improved type annotations, a fix +for symlinked packages in ``package_distributions``, the addition of +``EntryPoints.__repr__``, the new ``diagnose`` script, the new +``Distribution.origin`` property, and removal of deprecated ``EntryPoint`` +access by numeric index (tuple behavior). diff --git a/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst b/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst new file mode 100644 index 00000000000000..17c69572d9f2b1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst @@ -0,0 +1,6 @@ +Fix :func:`shutil.copymode` and :func:`shutil.copystat` on Windows. +Previously, they behaved differently when *dst* was a symbolic link: +if *follow_symlinks* was true or *src* was not a symbolic link, they +modified the permission bits of *dst* itself rather than those of the +file it points to; if *follow_symlinks* was false and *src* was a symbolic +link, they did not modify the permission bits at all. (A sketch of the +corrected behaviour appears below.) diff --git a/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst b/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst new file mode 100644 index 00000000000000..13fe4ff5f6a8bd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst @@ -0,0 +1,2 @@ +Add support for :func:`os.fchmod`, and for passing a file descriptor +to :func:`os.chmod`, on Windows. diff --git a/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst b/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst new file mode 100644 index 00000000000000..6db74cda166e92 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst @@ -0,0 +1 @@ +Fix an ``AttributeError`` during asyncio SSL protocol aborts in SSL-over-SSL scenarios. diff --git a/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst b/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst new file mode 100644 index 00000000000000..da00977f03cefd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst @@ -0,0 +1 @@ +Fix import of :mod:`unittest.mock` when CPython is built without docstrings. diff --git a/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst b/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst new file mode 100644 index 00000000000000..51ac0752cfae84 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst @@ -0,0 +1,5 @@ +Fix segfaults in the ``_elementtree`` module. +Fix the first segfault, during deallocation of ``_elementtree.XMLParser`` instances, by keeping a strong reference +to the ``pyexpat`` module in the module state for the lifetime of the capsule. +Fix the second segfault, which happens in the same deallocation process, by keeping a strong reference +to the ``_elementtree`` module in the ``XMLParser`` structure for the lifetime of the ``_elementtree`` module. 
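The :func:`shutil.copymode` fix (gh-113188) described above can be pictured with a short sketch (hypothetical file names, not part of this patch; on POSIX the full permission bits are copied, on Windows only the read-only attribute is affected, and creating symlinks on Windows may require extra privileges)::

    import os
    import shutil
    import stat

    # Two regular files, plus a symlink pointing at each one.
    for name, text in (("src.txt", "src"), ("dst.txt", "dst")):
        with open(name, "w") as f:
            f.write(text)
    os.symlink("src.txt", "src.lnk")
    os.symlink("dst.txt", "dst.lnk")

    os.chmod("src.txt", stat.S_IRUSR | stat.S_IWUSR)  # 0o600
    os.chmod("dst.txt", stat.S_IRWXU)                 # 0o700

    # With the default follow_symlinks=True, both links are followed, so the
    # permission bits of dst.txt (not of dst.lnk itself) now match src.txt.
    shutil.copymode("src.lnk", "dst.lnk")
    assert stat.S_IMODE(os.stat("dst.txt").st_mode) == \
           stat.S_IMODE(os.stat("src.txt").st_mode)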
diff --git a/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst b/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst new file mode 100644 index 00000000000000..2082fe6391d261 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst @@ -0,0 +1 @@ +Fix the :mod:`multiprocessing` logger's handling of the ``%(filename)s`` format field. diff --git a/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst b/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst new file mode 100644 index 00000000000000..5f66d6a00b4d3d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst @@ -0,0 +1,6 @@ +When a second reference to a string appears in the input to :mod:`pickle`, +and the Python implementation is in use, +it is now guaranteed that a single copy gets pickled +and a single object is shared when reloaded (see the sketch below). +Previously, in protocol 0, when a string contained certain characters +(e.g. a newline), it resulted in duplicate objects. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst b/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst new file mode 100644 index 00000000000000..4ae55c0293198a --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst @@ -0,0 +1,3 @@ +webbrowser: Don't look for X11 browsers on macOS. Those are generally not +used, and probing for them can result in starting XQuartz even if it isn't +otherwise used. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst b/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst new file mode 100644 index 00000000000000..870b84a4d1af80 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst @@ -0,0 +1,2 @@ +The :program:`freeze` tool doesn't work with framework builds of Python. +Document this and bail out early when running the tool with such a build. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst b/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst new file mode 100644 index 00000000000000..96dcd4765d95da --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst @@ -0,0 +1,3 @@ +Document that :mod:`dbm.ndbm` can silently corrupt DBM files on updates when +exceeding undocumented platform limits, and can crash (segmentation fault) +when reading such a corrupted file. (FB8919203)
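The :mod:`pickle` guarantee from gh-113028 above can be demonstrated with a small sketch (hypothetical, and deliberately using the internal pure-Python pickler, since the entry only applies to the Python implementation)::

    import io
    import pickle

    s = "line one\nline two"          # a string that trips the old bug
    payload = [s, s]                  # two references to the same string

    buf = io.BytesIO()
    pickle._Pickler(buf, protocol=0).dump(payload)

    a, b = pickle.loads(buf.getvalue())
    # The second occurrence is pickled as a memo reference, so a single
    # object is shared after loading; before the fix, protocol 0 produced
    # two separate copies for strings containing e.g. newlines.
    assert a is b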
diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index 81f8486ea350c1..5b3cd04ffa7f74 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -1700,7 +1700,7 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "7ccf472345f20d35bdc9d1841ff5f313260c2c33fe417f48c30ac46cccabf5be" + "checksumValue": "5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76" } ], "downloadLocation": "https://files.pythonhosted.org/packages/15/aa/3f4c7bcee2057a76562a5b33ecbd199be08cdb4443a02e26bd2c3cf6fc39/pip-23.3.2-py3-none-any.whl", diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index f9d5793f9b6497..5bf67870767698 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -98,6 +98,7 @@ typedef struct { PyTypeObject *TreeBuilder_Type; PyTypeObject *XMLParser_Type; + PyObject *expat_capsule; struct PyExpat_CAPI *expat_capi; } elementtreestate; @@ -155,6 +156,7 @@ elementtree_clear(PyObject *m) Py_CLEAR(st->ElementIter_Type); Py_CLEAR(st->TreeBuilder_Type); Py_CLEAR(st->XMLParser_Type); + Py_CLEAR(st->expat_capsule); st->expat_capi = NULL; return 0; @@ -175,6 +177,7 @@ elementtree_traverse(PyObject *m, visitproc visit, void *arg) Py_VISIT(st->ElementIter_Type); Py_VISIT(st->TreeBuilder_Type); Py_VISIT(st->XMLParser_Type); + Py_VISIT(st->expat_capsule); return 0; } @@ -3066,6 +3069,7 @@ typedef struct { PyObject *handle_close; elementtreestate *state; + PyObject *elementtree_module; } XMLParserObject; /* helpers */ @@ -3607,7 +3611,11 @@ xmlparser_new(PyTypeObject *type, PyObject *args, PyObject *kwds) self->handle_start = self->handle_data = self->handle_end = NULL; self->handle_comment = self->handle_pi = self->handle_close = NULL; self->handle_doctype = NULL; - self->state = get_elementtree_state_by_type(type); + self->elementtree_module = PyType_GetModuleByDef(type, &elementtreemodule); + assert(self->elementtree_module != NULL); + Py_INCREF(self->elementtree_module); + // See gh-111784 for an explanation of why the reference to the module is needed here. 
+ self->state = get_elementtree_state(self->elementtree_module); } return (PyObject *)self; } @@ -3784,6 +3792,7 @@ xmlparser_gc_clear(XMLParserObject *self) EXPAT(st, ParserFree)(parser); } + Py_CLEAR(self->elementtree_module); Py_CLEAR(self->handle_close); Py_CLEAR(self->handle_pi); Py_CLEAR(self->handle_comment); @@ -4343,7 +4352,10 @@ module_exec(PyObject *m) goto error; /* link against pyexpat */ - st->expat_capi = PyCapsule_Import(PyExpat_CAPSULE_NAME, 0); + if (!(st->expat_capsule = _PyImport_GetModuleAttrString("pyexpat", "expat_CAPI"))) + goto error; + if (!(st->expat_capi = PyCapsule_GetPointer(st->expat_capsule, PyExpat_CAPSULE_NAME))) + goto error; if (st->expat_capi) { /* check that it's usable */ if (strcmp(st->expat_capi->magic, PyExpat_CAPI_MAGIC) != 0 || diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index ec46d5409a3d82..d5bec5f71f5be8 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -327,11 +327,16 @@ _io__Buffered_simple_flush(buffered *self, PyObject *Py_UNUSED(ignored)) return return_value; } +#if defined(_io__Buffered_closed_HAS_DOCSTR) +# define _io__Buffered_closed_DOCSTR _io__Buffered_closed__doc__ +#else +# define _io__Buffered_closed_DOCSTR NULL +#endif #if defined(_IO__BUFFERED_CLOSED_GETSETDEF) # undef _IO__BUFFERED_CLOSED_GETSETDEF -# define _IO__BUFFERED_CLOSED_GETSETDEF {"closed", (getter)_io__Buffered_closed_get, (setter)_io__Buffered_closed_set, NULL}, +# define _IO__BUFFERED_CLOSED_GETSETDEF {"closed", (getter)_io__Buffered_closed_get, (setter)_io__Buffered_closed_set, _io__Buffered_closed_DOCSTR}, #else -# define _IO__BUFFERED_CLOSED_GETSETDEF {"closed", (getter)_io__Buffered_closed_get, NULL, NULL}, +# define _IO__BUFFERED_CLOSED_GETSETDEF {"closed", (getter)_io__Buffered_closed_get, NULL, _io__Buffered_closed_DOCSTR}, #endif static PyObject * @@ -464,11 +469,16 @@ _io__Buffered_writable(buffered *self, PyObject *Py_UNUSED(ignored)) return return_value; } +#if defined(_io__Buffered_name_HAS_DOCSTR) +# define _io__Buffered_name_DOCSTR _io__Buffered_name__doc__ +#else +# define _io__Buffered_name_DOCSTR NULL +#endif #if defined(_IO__BUFFERED_NAME_GETSETDEF) # undef _IO__BUFFERED_NAME_GETSETDEF -# define _IO__BUFFERED_NAME_GETSETDEF {"name", (getter)_io__Buffered_name_get, (setter)_io__Buffered_name_set, NULL}, +# define _IO__BUFFERED_NAME_GETSETDEF {"name", (getter)_io__Buffered_name_get, (setter)_io__Buffered_name_set, _io__Buffered_name_DOCSTR}, #else -# define _IO__BUFFERED_NAME_GETSETDEF {"name", (getter)_io__Buffered_name_get, NULL, NULL}, +# define _IO__BUFFERED_NAME_GETSETDEF {"name", (getter)_io__Buffered_name_get, NULL, _io__Buffered_name_DOCSTR}, #endif static PyObject * @@ -486,11 +496,16 @@ _io__Buffered_name_get(buffered *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io__Buffered_mode_HAS_DOCSTR) +# define _io__Buffered_mode_DOCSTR _io__Buffered_mode__doc__ +#else +# define _io__Buffered_mode_DOCSTR NULL +#endif #if defined(_IO__BUFFERED_MODE_GETSETDEF) # undef _IO__BUFFERED_MODE_GETSETDEF -# define _IO__BUFFERED_MODE_GETSETDEF {"mode", (getter)_io__Buffered_mode_get, (setter)_io__Buffered_mode_set, NULL}, +# define _IO__BUFFERED_MODE_GETSETDEF {"mode", (getter)_io__Buffered_mode_get, (setter)_io__Buffered_mode_set, _io__Buffered_mode_DOCSTR}, #else -# define _IO__BUFFERED_MODE_GETSETDEF {"mode", (getter)_io__Buffered_mode_get, NULL, NULL}, +# define _IO__BUFFERED_MODE_GETSETDEF {"mode", (getter)_io__Buffered_mode_get, NULL, 
_io__Buffered_mode_DOCSTR}, #endif static PyObject * @@ -1230,4 +1245,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=0999c33f666dc692 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=442b05b9a117df6c input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index fc2962d1c9c9a7..6bdb2181985f7d 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -475,11 +475,16 @@ _io_StringIO___setstate__(stringio *self, PyObject *state) return return_value; } +#if defined(_io_StringIO_closed_HAS_DOCSTR) +# define _io_StringIO_closed_DOCSTR _io_StringIO_closed__doc__ +#else +# define _io_StringIO_closed_DOCSTR NULL +#endif #if defined(_IO_STRINGIO_CLOSED_GETSETDEF) # undef _IO_STRINGIO_CLOSED_GETSETDEF -# define _IO_STRINGIO_CLOSED_GETSETDEF {"closed", (getter)_io_StringIO_closed_get, (setter)_io_StringIO_closed_set, NULL}, +# define _IO_STRINGIO_CLOSED_GETSETDEF {"closed", (getter)_io_StringIO_closed_get, (setter)_io_StringIO_closed_set, _io_StringIO_closed_DOCSTR}, #else -# define _IO_STRINGIO_CLOSED_GETSETDEF {"closed", (getter)_io_StringIO_closed_get, NULL, NULL}, +# define _IO_STRINGIO_CLOSED_GETSETDEF {"closed", (getter)_io_StringIO_closed_get, NULL, _io_StringIO_closed_DOCSTR}, #endif static PyObject * @@ -497,11 +502,16 @@ _io_StringIO_closed_get(stringio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_StringIO_line_buffering_HAS_DOCSTR) +# define _io_StringIO_line_buffering_DOCSTR _io_StringIO_line_buffering__doc__ +#else +# define _io_StringIO_line_buffering_DOCSTR NULL +#endif #if defined(_IO_STRINGIO_LINE_BUFFERING_GETSETDEF) # undef _IO_STRINGIO_LINE_BUFFERING_GETSETDEF -# define _IO_STRINGIO_LINE_BUFFERING_GETSETDEF {"line_buffering", (getter)_io_StringIO_line_buffering_get, (setter)_io_StringIO_line_buffering_set, NULL}, +# define _IO_STRINGIO_LINE_BUFFERING_GETSETDEF {"line_buffering", (getter)_io_StringIO_line_buffering_get, (setter)_io_StringIO_line_buffering_set, _io_StringIO_line_buffering_DOCSTR}, #else -# define _IO_STRINGIO_LINE_BUFFERING_GETSETDEF {"line_buffering", (getter)_io_StringIO_line_buffering_get, NULL, NULL}, +# define _IO_STRINGIO_LINE_BUFFERING_GETSETDEF {"line_buffering", (getter)_io_StringIO_line_buffering_get, NULL, _io_StringIO_line_buffering_DOCSTR}, #endif static PyObject * @@ -519,11 +529,16 @@ _io_StringIO_line_buffering_get(stringio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_StringIO_newlines_HAS_DOCSTR) +# define _io_StringIO_newlines_DOCSTR _io_StringIO_newlines__doc__ +#else +# define _io_StringIO_newlines_DOCSTR NULL +#endif #if defined(_IO_STRINGIO_NEWLINES_GETSETDEF) # undef _IO_STRINGIO_NEWLINES_GETSETDEF -# define _IO_STRINGIO_NEWLINES_GETSETDEF {"newlines", (getter)_io_StringIO_newlines_get, (setter)_io_StringIO_newlines_set, NULL}, +# define _IO_STRINGIO_NEWLINES_GETSETDEF {"newlines", (getter)_io_StringIO_newlines_get, (setter)_io_StringIO_newlines_set, _io_StringIO_newlines_DOCSTR}, #else -# define _IO_STRINGIO_NEWLINES_GETSETDEF {"newlines", (getter)_io_StringIO_newlines_get, NULL, NULL}, +# define _IO_STRINGIO_NEWLINES_GETSETDEF {"newlines", (getter)_io_StringIO_newlines_get, NULL, _io_StringIO_newlines_DOCSTR}, #endif static PyObject * @@ -540,4 +555,4 @@ _io_StringIO_newlines_get(stringio *self, void *Py_UNUSED(context)) return return_value; } -/*[clinic end generated code: output=27726751d98ab617 
input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9ffea20cd32d4cd8 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index f24f65f0c1d4f9..23b3cc8d71e098 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -201,6 +201,89 @@ _io__TextIOBase_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, return return_value; } +PyDoc_STRVAR(_io__TextIOBase_encoding__doc__, +"Encoding of the text stream.\n" +"\n" +"Subclasses should override."); +#define _io__TextIOBase_encoding_HAS_DOCSTR + +#if defined(_io__TextIOBase_encoding_HAS_DOCSTR) +# define _io__TextIOBase_encoding_DOCSTR _io__TextIOBase_encoding__doc__ +#else +# define _io__TextIOBase_encoding_DOCSTR NULL +#endif +#if defined(_IO__TEXTIOBASE_ENCODING_GETSETDEF) +# undef _IO__TEXTIOBASE_ENCODING_GETSETDEF +# define _IO__TEXTIOBASE_ENCODING_GETSETDEF {"encoding", (getter)_io__TextIOBase_encoding_get, (setter)_io__TextIOBase_encoding_set, _io__TextIOBase_encoding_DOCSTR}, +#else +# define _IO__TEXTIOBASE_ENCODING_GETSETDEF {"encoding", (getter)_io__TextIOBase_encoding_get, NULL, _io__TextIOBase_encoding_DOCSTR}, +#endif + +static PyObject * +_io__TextIOBase_encoding_get_impl(PyObject *self); + +static PyObject * +_io__TextIOBase_encoding_get(PyObject *self, void *Py_UNUSED(context)) +{ + return _io__TextIOBase_encoding_get_impl(self); +} + +PyDoc_STRVAR(_io__TextIOBase_newlines__doc__, +"Line endings translated so far.\n" +"\n" +"Only line endings translated during reading are considered.\n" +"\n" +"Subclasses should override."); +#define _io__TextIOBase_newlines_HAS_DOCSTR + +#if defined(_io__TextIOBase_newlines_HAS_DOCSTR) +# define _io__TextIOBase_newlines_DOCSTR _io__TextIOBase_newlines__doc__ +#else +# define _io__TextIOBase_newlines_DOCSTR NULL +#endif +#if defined(_IO__TEXTIOBASE_NEWLINES_GETSETDEF) +# undef _IO__TEXTIOBASE_NEWLINES_GETSETDEF +# define _IO__TEXTIOBASE_NEWLINES_GETSETDEF {"newlines", (getter)_io__TextIOBase_newlines_get, (setter)_io__TextIOBase_newlines_set, _io__TextIOBase_newlines_DOCSTR}, +#else +# define _IO__TEXTIOBASE_NEWLINES_GETSETDEF {"newlines", (getter)_io__TextIOBase_newlines_get, NULL, _io__TextIOBase_newlines_DOCSTR}, +#endif + +static PyObject * +_io__TextIOBase_newlines_get_impl(PyObject *self); + +static PyObject * +_io__TextIOBase_newlines_get(PyObject *self, void *Py_UNUSED(context)) +{ + return _io__TextIOBase_newlines_get_impl(self); +} + +PyDoc_STRVAR(_io__TextIOBase_errors__doc__, +"The error setting of the decoder or encoder.\n" +"\n" +"Subclasses should override."); +#define _io__TextIOBase_errors_HAS_DOCSTR + +#if defined(_io__TextIOBase_errors_HAS_DOCSTR) +# define _io__TextIOBase_errors_DOCSTR _io__TextIOBase_errors__doc__ +#else +# define _io__TextIOBase_errors_DOCSTR NULL +#endif +#if defined(_IO__TEXTIOBASE_ERRORS_GETSETDEF) +# undef _IO__TEXTIOBASE_ERRORS_GETSETDEF +# define _IO__TEXTIOBASE_ERRORS_GETSETDEF {"errors", (getter)_io__TextIOBase_errors_get, (setter)_io__TextIOBase_errors_set, _io__TextIOBase_errors_DOCSTR}, +#else +# define _IO__TEXTIOBASE_ERRORS_GETSETDEF {"errors", (getter)_io__TextIOBase_errors_get, NULL, _io__TextIOBase_errors_DOCSTR}, +#endif + +static PyObject * +_io__TextIOBase_errors_get_impl(PyObject *self); + +static PyObject * +_io__TextIOBase_errors_get(PyObject *self, void *Py_UNUSED(context)) +{ + return _io__TextIOBase_errors_get_impl(self); +} + PyDoc_STRVAR(_io_IncrementalNewlineDecoder___init____doc__, "IncrementalNewlineDecoder(decoder, translate, 
errors=\'strict\')\n" "--\n" @@ -1048,11 +1131,16 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) return return_value; } +#if defined(_io_TextIOWrapper_name_HAS_DOCSTR) +# define _io_TextIOWrapper_name_DOCSTR _io_TextIOWrapper_name__doc__ +#else +# define _io_TextIOWrapper_name_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER_NAME_GETSETDEF) # undef _IO_TEXTIOWRAPPER_NAME_GETSETDEF -# define _IO_TEXTIOWRAPPER_NAME_GETSETDEF {"name", (getter)_io_TextIOWrapper_name_get, (setter)_io_TextIOWrapper_name_set, NULL}, +# define _IO_TEXTIOWRAPPER_NAME_GETSETDEF {"name", (getter)_io_TextIOWrapper_name_get, (setter)_io_TextIOWrapper_name_set, _io_TextIOWrapper_name_DOCSTR}, #else -# define _IO_TEXTIOWRAPPER_NAME_GETSETDEF {"name", (getter)_io_TextIOWrapper_name_get, NULL, NULL}, +# define _IO_TEXTIOWRAPPER_NAME_GETSETDEF {"name", (getter)_io_TextIOWrapper_name_get, NULL, _io_TextIOWrapper_name_DOCSTR}, #endif static PyObject * @@ -1070,11 +1158,16 @@ _io_TextIOWrapper_name_get(textio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_TextIOWrapper_closed_HAS_DOCSTR) +# define _io_TextIOWrapper_closed_DOCSTR _io_TextIOWrapper_closed__doc__ +#else +# define _io_TextIOWrapper_closed_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER_CLOSED_GETSETDEF) # undef _IO_TEXTIOWRAPPER_CLOSED_GETSETDEF -# define _IO_TEXTIOWRAPPER_CLOSED_GETSETDEF {"closed", (getter)_io_TextIOWrapper_closed_get, (setter)_io_TextIOWrapper_closed_set, NULL}, +# define _IO_TEXTIOWRAPPER_CLOSED_GETSETDEF {"closed", (getter)_io_TextIOWrapper_closed_get, (setter)_io_TextIOWrapper_closed_set, _io_TextIOWrapper_closed_DOCSTR}, #else -# define _IO_TEXTIOWRAPPER_CLOSED_GETSETDEF {"closed", (getter)_io_TextIOWrapper_closed_get, NULL, NULL}, +# define _IO_TEXTIOWRAPPER_CLOSED_GETSETDEF {"closed", (getter)_io_TextIOWrapper_closed_get, NULL, _io_TextIOWrapper_closed_DOCSTR}, #endif static PyObject * @@ -1092,11 +1185,16 @@ _io_TextIOWrapper_closed_get(textio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_TextIOWrapper_newlines_HAS_DOCSTR) +# define _io_TextIOWrapper_newlines_DOCSTR _io_TextIOWrapper_newlines__doc__ +#else +# define _io_TextIOWrapper_newlines_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF) # undef _IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF -# define _IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF {"newlines", (getter)_io_TextIOWrapper_newlines_get, (setter)_io_TextIOWrapper_newlines_set, NULL}, +# define _IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF {"newlines", (getter)_io_TextIOWrapper_newlines_get, (setter)_io_TextIOWrapper_newlines_set, _io_TextIOWrapper_newlines_DOCSTR}, #else -# define _IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF {"newlines", (getter)_io_TextIOWrapper_newlines_get, NULL, NULL}, +# define _IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF {"newlines", (getter)_io_TextIOWrapper_newlines_get, NULL, _io_TextIOWrapper_newlines_DOCSTR}, #endif static PyObject * @@ -1114,11 +1212,16 @@ _io_TextIOWrapper_newlines_get(textio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_TextIOWrapper_errors_HAS_DOCSTR) +# define _io_TextIOWrapper_errors_DOCSTR _io_TextIOWrapper_errors__doc__ +#else +# define _io_TextIOWrapper_errors_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER_ERRORS_GETSETDEF) # undef _IO_TEXTIOWRAPPER_ERRORS_GETSETDEF -# define _IO_TEXTIOWRAPPER_ERRORS_GETSETDEF {"errors", (getter)_io_TextIOWrapper_errors_get, (setter)_io_TextIOWrapper_errors_set, NULL}, +# define _IO_TEXTIOWRAPPER_ERRORS_GETSETDEF {"errors", 
(getter)_io_TextIOWrapper_errors_get, (setter)_io_TextIOWrapper_errors_set, _io_TextIOWrapper_errors_DOCSTR}, #else -# define _IO_TEXTIOWRAPPER_ERRORS_GETSETDEF {"errors", (getter)_io_TextIOWrapper_errors_get, NULL, NULL}, +# define _IO_TEXTIOWRAPPER_ERRORS_GETSETDEF {"errors", (getter)_io_TextIOWrapper_errors_get, NULL, _io_TextIOWrapper_errors_DOCSTR}, #endif static PyObject * @@ -1136,11 +1239,16 @@ _io_TextIOWrapper_errors_get(textio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_io_TextIOWrapper__CHUNK_SIZE_HAS_DOCSTR) +# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR _io_TextIOWrapper__CHUNK_SIZE__doc__ +#else +# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF) # undef _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF -# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, (setter)_io_TextIOWrapper__CHUNK_SIZE_set, NULL}, +# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, (setter)_io_TextIOWrapper__CHUNK_SIZE_set, _io_TextIOWrapper__CHUNK_SIZE_DOCSTR}, #else -# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, NULL, NULL}, +# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, NULL, _io_TextIOWrapper__CHUNK_SIZE_DOCSTR}, #endif static PyObject * @@ -1158,9 +1266,14 @@ _io_TextIOWrapper__CHUNK_SIZE_get(textio *self, void *Py_UNUSED(context)) return return_value; } +#if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_HAS_DOCSTR) +# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR _io_TextIOWrapper__CHUNK_SIZE__doc__ +#else +# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR NULL +#endif #if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF) # undef _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF -# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, (setter)_io_TextIOWrapper__CHUNK_SIZE_set, NULL}, +# define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", (getter)_io_TextIOWrapper__CHUNK_SIZE_get, (setter)_io_TextIOWrapper__CHUNK_SIZE_set, _io_TextIOWrapper__CHUNK_SIZE_DOCSTR}, #else # define _IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF {"_CHUNK_SIZE", NULL, (setter)_io_TextIOWrapper__CHUNK_SIZE_set, NULL}, #endif @@ -1179,4 +1292,4 @@ _io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED return return_value; } -/*[clinic end generated code: output=7af87bf848a5d3f3 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d01aa598647c1385 input=a9049054013a1b77]*/ diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 702336ca2aeb06..4507930c14bb50 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -131,40 +131,52 @@ _io__TextIOBase_write_impl(PyObject *self, PyTypeObject *cls, return _unsupported(state, "write"); } -PyDoc_STRVAR(textiobase_encoding_doc, - "Encoding of the text stream.\n" - "\n" - "Subclasses should override.\n" - ); +/*[clinic input] +@getter +_io._TextIOBase.encoding + +Encoding of the text stream. + +Subclasses should override. 
+[clinic start generated code]*/ static PyObject * -textiobase_encoding_get(PyObject *self, void *context) +_io__TextIOBase_encoding_get_impl(PyObject *self) +/*[clinic end generated code: output=e0f5d8f548b92432 input=4736d7621dd38f43]*/ { Py_RETURN_NONE; } -PyDoc_STRVAR(textiobase_newlines_doc, - "Line endings translated so far.\n" - "\n" - "Only line endings translated during reading are considered.\n" - "\n" - "Subclasses should override.\n" - ); +/*[clinic input] +@getter +_io._TextIOBase.newlines + +Line endings translated so far. + +Only line endings translated during reading are considered. + +Subclasses should override. +[clinic start generated code]*/ static PyObject * -textiobase_newlines_get(PyObject *self, void *context) +_io__TextIOBase_newlines_get_impl(PyObject *self) +/*[clinic end generated code: output=46ec147fb9f00c2a input=a5b196d076af1164]*/ { Py_RETURN_NONE; } -PyDoc_STRVAR(textiobase_errors_doc, - "The error setting of the decoder or encoder.\n" - "\n" - "Subclasses should override.\n" - ); +/*[clinic input] +@getter +_io._TextIOBase.errors + +The error setting of the decoder or encoder. + +Subclasses should override. +[clinic start generated code]*/ static PyObject * -textiobase_errors_get(PyObject *self, void *context) +_io__TextIOBase_errors_get_impl(PyObject *self) +/*[clinic end generated code: output=c6623d6addcd087d input=974aa52d1db93a82]*/ { Py_RETURN_NONE; } @@ -179,9 +191,9 @@ static PyMethodDef textiobase_methods[] = { }; static PyGetSetDef textiobase_getset[] = { - {"encoding", (getter)textiobase_encoding_get, NULL, textiobase_encoding_doc}, - {"newlines", (getter)textiobase_newlines_get, NULL, textiobase_newlines_doc}, - {"errors", (getter)textiobase_errors_get, NULL, textiobase_errors_doc}, + _IO__TEXTIOBASE_ENCODING_GETSETDEF + _IO__TEXTIOBASE_NEWLINES_GETSETDEF + _IO__TEXTIOBASE_ERRORS_GETSETDEF {NULL} }; diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index f36872a1eb7a0f..b7639af4b78a9d 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -601,7 +601,7 @@ os_chmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw return return_value; } -#if defined(HAVE_FCHMOD) +#if (defined(HAVE_FCHMOD) || defined(MS_WINDOWS)) PyDoc_STRVAR(os_fchmod__doc__, "fchmod($module, /, fd, mode)\n" @@ -676,7 +676,7 @@ os_fchmod(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k return return_value; } -#endif /* defined(HAVE_FCHMOD) */ +#endif /* (defined(HAVE_FCHMOD) || defined(MS_WINDOWS)) */ #if (defined(HAVE_LCHMOD) || defined(MS_WINDOWS)) @@ -12422,4 +12422,4 @@ os__supports_virtual_terminal(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=1be15e60a553b40d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b82391c4f58231b6 input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index c7ee591f30c51f..c635fd4d993d57 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -2855,6 +2855,8 @@ FTRUNCATE #ifdef MS_WINDOWS #undef PATH_HAVE_FTRUNCATE #define PATH_HAVE_FTRUNCATE 1 + #undef PATH_HAVE_FCHMOD + #define PATH_HAVE_FCHMOD 1 #endif /*[python input] @@ -3332,7 +3334,38 @@ win32_lchmod(LPCWSTR path, int mode) } return SetFileAttributesW(path, attr); } -#endif + +static int +win32_hchmod(HANDLE hfile, int mode) +{ + 
FILE_BASIC_INFO info; + if (!GetFileInformationByHandleEx(hfile, FileBasicInfo, + &info, sizeof(info))) + { + return 0; + } + if (mode & _S_IWRITE) { + info.FileAttributes &= ~FILE_ATTRIBUTE_READONLY; + } + else { + info.FileAttributes |= FILE_ATTRIBUTE_READONLY; + } + return SetFileInformationByHandle(hfile, FileBasicInfo, + &info, sizeof(info)); +} + +static int +win32_fchmod(int fd, int mode) +{ + HANDLE hfile = _Py_get_osfhandle_noraise(fd); + if (hfile == INVALID_HANDLE_VALUE) { + SetLastError(ERROR_INVALID_HANDLE); + return 0; + } + return win32_hchmod(hfile, mode); +} + +#endif /* MS_WINDOWS */ /*[clinic input] os.chmod @@ -3395,27 +3428,16 @@ os_chmod_impl(PyObject *module, path_t *path, int mode, int dir_fd, #ifdef MS_WINDOWS result = 0; Py_BEGIN_ALLOW_THREADS - if (follow_symlinks) { - HANDLE hfile; - FILE_BASIC_INFO info; - - hfile = CreateFileW(path->wide, - FILE_READ_ATTRIBUTES|FILE_WRITE_ATTRIBUTES, - 0, NULL, - OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); + if (path->fd != -1) { + result = win32_fchmod(path->fd, mode); + } + else if (follow_symlinks) { + HANDLE hfile = CreateFileW(path->wide, + FILE_READ_ATTRIBUTES|FILE_WRITE_ATTRIBUTES, + 0, NULL, + OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); if (hfile != INVALID_HANDLE_VALUE) { - if (GetFileInformationByHandleEx(hfile, FileBasicInfo, - &info, sizeof(info))) - { - if (mode & _S_IWRITE) { - info.FileAttributes &= ~FILE_ATTRIBUTE_READONLY; - } - else { - info.FileAttributes |= FILE_ATTRIBUTE_READONLY; - } - result = SetFileInformationByHandle(hfile, FileBasicInfo, - &info, sizeof(info)); - } + result = win32_hchmod(hfile, mode); (void)CloseHandle(hfile); } } @@ -3511,7 +3533,7 @@ os_chmod_impl(PyObject *module, path_t *path, int mode, int dir_fd, } -#ifdef HAVE_FCHMOD +#if defined(HAVE_FCHMOD) || defined(MS_WINDOWS) /*[clinic input] os.fchmod @@ -3533,12 +3555,21 @@ os_fchmod_impl(PyObject *module, int fd, int mode) /*[clinic end generated code: output=afd9bc05b4e426b3 input=b5594618bbbc22df]*/ { int res; - int async_err = 0; if (PySys_Audit("os.chmod", "iii", fd, mode, -1) < 0) { return NULL; } +#ifdef MS_WINDOWS + res = 0; + Py_BEGIN_ALLOW_THREADS + res = win32_fchmod(fd, mode); + Py_END_ALLOW_THREADS + if (!res) { + return PyErr_SetFromWindowsErr(0); + } +#else /* MS_WINDOWS */ + int async_err = 0; do { Py_BEGIN_ALLOW_THREADS res = fchmod(fd, mode); @@ -3546,10 +3577,11 @@ os_fchmod_impl(PyObject *module, int fd, int mode) } while (res != 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); if (res != 0) return (!async_err) ? 
posix_error() : NULL; +#endif /* MS_WINDOWS */ Py_RETURN_NONE; } -#endif /* HAVE_FCHMOD */ +#endif /* HAVE_FCHMOD || MS_WINDOWS */ #if defined(HAVE_LCHMOD) || defined(MS_WINDOWS) diff --git a/Objects/classobject.c b/Objects/classobject.c index 618d88894debbe..d7e520f556d9a0 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -319,6 +319,13 @@ method_traverse(PyMethodObject *im, visitproc visit, void *arg) return 0; } +static PyObject * +method_descr_get(PyObject *meth, PyObject *obj, PyObject *cls) +{ + Py_INCREF(meth); + return meth; +} + PyTypeObject PyMethod_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) .tp_name = "method", @@ -339,6 +346,7 @@ PyTypeObject PyMethod_Type = { .tp_methods = method_methods, .tp_members = method_memberlist, .tp_getset = method_getset, + .tp_descr_get = method_descr_get, .tp_new = method_new, }; diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index 7711434f17c2bc..3e5167d9242fe4 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -954,9 +954,11 @@ PyDoc_STRVAR(unicode_split__doc__, " character (including \\n \\r \\t \\f and spaces) and will discard\n" " empty strings from the result.\n" " maxsplit\n" -" Maximum number of splits (starting from the left).\n" +" Maximum number of splits.\n" " -1 (the default value) means no limit.\n" "\n" +"Splitting starts at the front of the string and works to the end.\n" +"\n" "Note, str.split() is mainly useful for data that has been intentionally\n" "delimited. With natural text that includes punctuation, consider using\n" "the regular expression module."); @@ -1078,7 +1080,7 @@ PyDoc_STRVAR(unicode_rsplit__doc__, " character (including \\n \\r \\t \\f and spaces) and will discard\n" " empty strings from the result.\n" " maxsplit\n" -" Maximum number of splits (starting from the left).\n" +" Maximum number of splits.\n" " -1 (the default value) means no limit.\n" "\n" "Splitting starts at the end of the string and works to the front."); @@ -1505,4 +1507,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=873d8b3d09af3095 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1aab29bab5201c78 input=a9049054013a1b77]*/ diff --git a/Objects/frameobject.c b/Objects/frameobject.c index be330a775872c2..cafe4ef6141d9a 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -811,7 +811,7 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore PyObject *exc = _PyFrame_StackPop(f->f_frame); assert(PyExceptionInstance_Check(exc) || exc == Py_None); PyThreadState *tstate = _PyThreadState_GET(); - Py_XSETREF(tstate->exc_info->exc_value, exc); + Py_XSETREF(tstate->exc_info->exc_value, exc == Py_None ? 
NULL : exc); } else { PyObject *v = _PyFrame_StackPop(f->f_frame); diff --git a/Objects/mimalloc/prim/unix/prim.c b/Objects/mimalloc/prim/unix/prim.c index cffbb2d0b4d7b2..2152017e01fb43 100644 --- a/Objects/mimalloc/prim/unix/prim.c +++ b/Objects/mimalloc/prim/unix/prim.c @@ -170,7 +170,7 @@ static void* unix_mmap_prim(void* addr, size_t size, size_t try_alignment, int p p = mmap(addr, size, protect_flags, flags | MAP_ALIGNED(n), fd, 0); if (p==MAP_FAILED || !_mi_is_aligned(p,try_alignment)) { int err = errno; - _mi_warning_message("unable to directly request aligned OS memory (error: %d (0x%x), size: 0x%zx bytes, alignment: 0x%zx, hint address: %p)\n", err, err, size, try_alignment, addr); + _mi_verbose_message("unable to directly request aligned OS memory (error: %d (0x%x), size: 0x%zx bytes, alignment: 0x%zx, hint address: %p)\n", err, err, size, try_alignment, addr); } if (p!=MAP_FAILED) return p; // fall back to regular mmap @@ -195,7 +195,7 @@ static void* unix_mmap_prim(void* addr, size_t size, size_t try_alignment, int p #else int err = errno; #endif - _mi_warning_message("unable to directly request hinted aligned OS memory (error: %d (0x%x), size: 0x%zx bytes, alignment: 0x%zx, hint address: %p)\n", err, err, size, try_alignment, hint); + _mi_verbose_message("unable to directly request hinted aligned OS memory (error: %d (0x%x), size: 0x%zx bytes, alignment: 0x%zx, hint address: %p)\n", err, err, size, try_alignment, hint); } if (p!=MAP_FAILED) return p; // fall back to regular mmap diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index e2741fef6debd3..3a1c516658dce7 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -788,7 +788,7 @@ PyObject* _Py_module_getattro_impl(PyModuleObject *m, PyObject *name, int suppress) { // When suppress=1, this function suppresses AttributeError. 
- PyObject *attr, *mod_name, *getattr; + PyObject *attr, *mod_name, *getattr, *origin; attr = _PyObject_GenericGetAttrWithDict((PyObject *)m, name, NULL, suppress); if (attr) { return attr; @@ -831,11 +831,31 @@ _Py_module_getattro_impl(PyModuleObject *m, PyObject *name, int suppress) if (suppress != 1) { int rc = _PyModuleSpec_IsInitializing(spec); if (rc > 0) { - PyErr_Format(PyExc_AttributeError, + int valid_spec = PyObject_GetOptionalAttr(spec, &_Py_ID(origin), &origin); + if (valid_spec == -1) { + Py_XDECREF(spec); + Py_DECREF(mod_name); + return NULL; + } + if (valid_spec == 1 && !PyUnicode_Check(origin)) { + valid_spec = 0; + Py_DECREF(origin); + } + if (valid_spec == 1) { + PyErr_Format(PyExc_AttributeError, + "partially initialized " + "module '%U' from '%U' has no attribute '%U' " + "(most likely due to a circular import)", + mod_name, origin, name); + Py_DECREF(origin); + } + else { + PyErr_Format(PyExc_AttributeError, "partially initialized " "module '%U' has no attribute '%U' " "(most likely due to a circular import)", mod_name, name); + } } else if (rc == 0) { rc = _PyModuleSpec_IsUninitializedSubmodule(spec, name); diff --git a/Objects/object.c b/Objects/object.c index cdb7a08a7828fb..d970a26756173b 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -1196,7 +1196,7 @@ PyObject_GetOptionalAttr(PyObject *v, PyObject *name, PyObject **result) } return 0; } - if (tp->tp_getattro == (getattrofunc)_Py_type_getattro) { + if (tp->tp_getattro == _Py_type_getattro) { int supress_missing_attribute_exception = 0; *result = _Py_type_getattro_impl((PyTypeObject*)v, name, &supress_missing_attribute_exception); if (supress_missing_attribute_exception) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 08f5f47d586729..ea29a38d74ae3e 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1597,8 +1597,9 @@ static PyGetSetDef type_getsets[] = { }; static PyObject * -type_repr(PyTypeObject *type) +type_repr(PyObject *self) { + PyTypeObject *type = (PyTypeObject *)self; if (type->tp_name == NULL) { // type_repr() called before the type is fully initialized // by PyType_Ready(). @@ -1630,8 +1631,9 @@ type_repr(PyTypeObject *type) } static PyObject * -type_call(PyTypeObject *type, PyObject *args, PyObject *kwds) +type_call(PyObject *self, PyObject *args, PyObject *kwds) { + PyTypeObject *type = (PyTypeObject *)self; PyObject *obj; PyThreadState *tstate = _PyThreadState_GET(); @@ -4917,14 +4919,15 @@ _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int * suppress_missin /* This is similar to PyObject_GenericGetAttr(), but uses _PyType_Lookup() instead of just looking in type->tp_dict. 
*/ PyObject * -_Py_type_getattro(PyTypeObject *type, PyObject *name) +_Py_type_getattro(PyObject *type, PyObject *name) { - return _Py_type_getattro_impl(type, name, NULL); + return _Py_type_getattro_impl((PyTypeObject *)type, name, NULL); } static int -type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) +type_setattro(PyObject *self, PyObject *name, PyObject *value) { + PyTypeObject *type = (PyTypeObject *)self; int res; if (type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) { PyErr_Format( @@ -5069,8 +5072,10 @@ _PyStaticType_Dealloc(PyInterpreterState *interp, PyTypeObject *type) static void -type_dealloc(PyTypeObject *type) +type_dealloc(PyObject *self) { + PyTypeObject *type = (PyTypeObject *)self; + // Assert this is a heap-allocated type object _PyObject_ASSERT((PyObject *)type, type->tp_flags & Py_TPFLAGS_HEAPTYPE); @@ -5257,8 +5262,10 @@ PyDoc_STRVAR(type_doc, "type(name, bases, dict, **kwds) -> a new type"); static int -type_traverse(PyTypeObject *type, visitproc visit, void *arg) +type_traverse(PyObject *self, visitproc visit, void *arg) { + PyTypeObject *type = (PyTypeObject *)self; + /* Because of type_is_gc(), the collector only calls this for heaptypes. */ if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { @@ -5286,8 +5293,10 @@ type_traverse(PyTypeObject *type, visitproc visit, void *arg) } static int -type_clear(PyTypeObject *type) +type_clear(PyObject *self) { + PyTypeObject *type = (PyTypeObject *)self; + /* Because of type_is_gc(), the collector only calls this for heaptypes. */ _PyObject_ASSERT((PyObject *)type, type->tp_flags & Py_TPFLAGS_HEAPTYPE); @@ -5334,9 +5343,9 @@ type_clear(PyTypeObject *type) } static int -type_is_gc(PyTypeObject *type) +type_is_gc(PyObject *type) { - return type->tp_flags & Py_TPFLAGS_HEAPTYPE; + return ((PyTypeObject *)type)->tp_flags & Py_TPFLAGS_HEAPTYPE; } @@ -5349,28 +5358,28 @@ PyTypeObject PyType_Type = { "type", /* tp_name */ sizeof(PyHeapTypeObject), /* tp_basicsize */ sizeof(PyMemberDef), /* tp_itemsize */ - (destructor)type_dealloc, /* tp_dealloc */ + type_dealloc, /* tp_dealloc */ offsetof(PyTypeObject, tp_vectorcall), /* tp_vectorcall_offset */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_as_async */ - (reprfunc)type_repr, /* tp_repr */ + type_repr, /* tp_repr */ &type_as_number, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ 0, /* tp_hash */ - (ternaryfunc)type_call, /* tp_call */ + type_call, /* tp_call */ 0, /* tp_str */ - (getattrofunc)_Py_type_getattro, /* tp_getattro */ - (setattrofunc)type_setattro, /* tp_setattro */ + _Py_type_getattro, /* tp_getattro */ + type_setattro, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TYPE_SUBCLASS | Py_TPFLAGS_HAVE_VECTORCALL | Py_TPFLAGS_ITEMS_AT_END, /* tp_flags */ type_doc, /* tp_doc */ - (traverseproc)type_traverse, /* tp_traverse */ - (inquiry)type_clear, /* tp_clear */ + type_traverse, /* tp_traverse */ + type_clear, /* tp_clear */ 0, /* tp_richcompare */ offsetof(PyTypeObject, tp_weaklist), /* tp_weaklistoffset */ 0, /* tp_iter */ @@ -5387,7 +5396,7 @@ PyTypeObject PyType_Type = { 0, /* tp_alloc */ type_new, /* tp_new */ PyObject_GC_Del, /* tp_free */ - (inquiry)type_is_gc, /* tp_is_gc */ + type_is_gc, /* tp_is_gc */ .tp_vectorcall = type_vectorcall, }; @@ -6561,6 +6570,12 @@ PyDoc_STRVAR(object_doc, "When called, it accepts no arguments and returns a new featureless\n" "instance that has no instance attributes and cannot be given any.\n"); +static Py_hash_t +object_hash(PyObject *obj) +{ 
+ return _Py_HashPointer(obj); +} + PyTypeObject PyBaseObject_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "object", /* tp_name */ @@ -6575,7 +6590,7 @@ PyTypeObject PyBaseObject_Type = { 0, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ - (hashfunc)_Py_HashPointer, /* tp_hash */ + object_hash, /* tp_hash */ 0, /* tp_call */ object_str, /* tp_str */ PyObject_GenericGetAttr, /* tp_getattro */ @@ -10389,9 +10404,22 @@ supercheck(PyTypeObject *type, PyObject *obj) Py_XDECREF(class_attr); } - PyErr_SetString(PyExc_TypeError, - "super(type, obj): " - "obj must be an instance or subtype of type"); + const char *type_or_instance, *obj_str; + + if (PyType_Check(obj)) { + type_or_instance = "type"; + obj_str = ((PyTypeObject*)obj)->tp_name; + } + else { + type_or_instance = "instance of"; + obj_str = Py_TYPE(obj)->tp_name; + } + + PyErr_Format(PyExc_TypeError, + "super(type, obj): obj (%s %.200s) is not " + "an instance or subtype of type (%.200s).", + type_or_instance, obj_str, type->tp_name); + return NULL; } diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 836e14fd5d5dea..ad87206b2a8200 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12504,11 +12504,13 @@ str.split as unicode_split character (including \n \r \t \f and spaces) and will discard empty strings from the result. maxsplit: Py_ssize_t = -1 - Maximum number of splits (starting from the left). + Maximum number of splits. -1 (the default value) means no limit. Return a list of the substrings in the string, using sep as the separator string. +Splitting starts at the front of the string and works to the end. + Note, str.split() is mainly useful for data that has been intentionally delimited. With natural text that includes punctuation, consider using the regular expression module. @@ -12517,7 +12519,7 @@ the regular expression module. static PyObject * unicode_split_impl(PyObject *self, PyObject *sep, Py_ssize_t maxsplit) -/*[clinic end generated code: output=3a65b1db356948dc input=07b9040d98c5fe8d]*/ +/*[clinic end generated code: output=3a65b1db356948dc input=a29bcc0c7a5af0eb]*/ { if (sep == Py_None) return split(self, NULL, maxsplit); diff --git a/Python/abstract_interp_cases.c.h b/Python/abstract_interp_cases.c.h deleted file mode 100644 index 96ac0aabd1b59f..00000000000000 --- a/Python/abstract_interp_cases.c.h +++ /dev/null @@ -1,967 +0,0 @@ -// This file is generated by Tools/cases_generator/generate_cases.py -// from: -// Python/bytecodes.c -// Do not edit! 
- - case NOP: { - break; - } - - case RESUME_CHECK: { - break; - } - - case POP_TOP: { - STACK_SHRINK(1); - break; - } - - case PUSH_NULL: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case END_SEND: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case UNARY_NEGATIVE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case UNARY_NOT: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _TO_BOOL: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case TO_BOOL_BOOL: { - break; - } - - case TO_BOOL_INT: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case TO_BOOL_LIST: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case TO_BOOL_NONE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case TO_BOOL_STR: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case TO_BOOL_ALWAYS_TRUE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case UNARY_INVERT: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _GUARD_BOTH_INT: { - break; - } - - case _GUARD_BOTH_FLOAT: { - break; - } - - case _BINARY_OP_MULTIPLY_FLOAT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _BINARY_OP_ADD_FLOAT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _BINARY_OP_SUBTRACT_FLOAT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _GUARD_BOTH_UNICODE: { - break; - } - - case _BINARY_OP_ADD_UNICODE: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _BINARY_SUBSCR: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BINARY_SLICE: { - STACK_SHRINK(2); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case STORE_SLICE: { - STACK_SHRINK(4); - break; - } - - case BINARY_SUBSCR_LIST_INT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BINARY_SUBSCR_STR_INT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BINARY_SUBSCR_TUPLE_INT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BINARY_SUBSCR_DICT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LIST_APPEND: { - STACK_SHRINK(1); - break; - } - - case SET_ADD: { - STACK_SHRINK(1); - break; - } - - case _STORE_SUBSCR: { - STACK_SHRINK(3); - 
break; - } - - case STORE_SUBSCR_LIST_INT: { - STACK_SHRINK(3); - break; - } - - case STORE_SUBSCR_DICT: { - STACK_SHRINK(3); - break; - } - - case DELETE_SUBSCR: { - STACK_SHRINK(2); - break; - } - - case CALL_INTRINSIC_1: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_INTRINSIC_2: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _POP_FRAME: { - STACK_SHRINK(1); - break; - } - - case GET_AITER: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case GET_ANEXT: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case GET_AWAITABLE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case POP_EXCEPT: { - STACK_SHRINK(1); - break; - } - - case LOAD_ASSERTION_ERROR: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LOAD_BUILD_CLASS: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case STORE_NAME: { - STACK_SHRINK(1); - break; - } - - case DELETE_NAME: { - break; - } - - case _UNPACK_SEQUENCE: { - STACK_SHRINK(1); - STACK_GROW(oparg); - break; - } - - case UNPACK_SEQUENCE_TWO_TUPLE: { - STACK_SHRINK(1); - STACK_GROW(oparg); - break; - } - - case UNPACK_SEQUENCE_TUPLE: { - STACK_SHRINK(1); - STACK_GROW(oparg); - break; - } - - case UNPACK_SEQUENCE_LIST: { - STACK_SHRINK(1); - STACK_GROW(oparg); - break; - } - - case UNPACK_EX: { - STACK_GROW((oparg & 0xFF) + (oparg >> 8)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg >> 8))), true); - break; - } - - case _STORE_ATTR: { - STACK_SHRINK(2); - break; - } - - case DELETE_ATTR: { - STACK_SHRINK(1); - break; - } - - case STORE_GLOBAL: { - STACK_SHRINK(1); - break; - } - - case DELETE_GLOBAL: { - break; - } - - case LOAD_LOCALS: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LOAD_FROM_DICT_OR_GLOBALS: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LOAD_NAME: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _LOAD_GLOBAL: { - STACK_GROW(1); - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _GUARD_GLOBALS_VERSION: { - break; - } - - case _GUARD_BUILTINS_VERSION: { - break; - } - - case _LOAD_GLOBAL_MODULE: { - STACK_GROW(1); - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _LOAD_GLOBAL_BUILTINS: { - STACK_GROW(1); - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 
1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case DELETE_FAST: { - break; - } - - case MAKE_CELL: { - break; - } - - case DELETE_DEREF: { - break; - } - - case LOAD_FROM_DICT_OR_DEREF: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LOAD_DEREF: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case STORE_DEREF: { - STACK_SHRINK(1); - break; - } - - case COPY_FREE_VARS: { - break; - } - - case BUILD_STRING: { - STACK_SHRINK(oparg); - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BUILD_TUPLE: { - STACK_SHRINK(oparg); - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BUILD_LIST: { - STACK_SHRINK(oparg); - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case LIST_EXTEND: { - STACK_SHRINK(1); - break; - } - - case SET_UPDATE: { - STACK_SHRINK(1); - break; - } - - case BUILD_SET: { - STACK_SHRINK(oparg); - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BUILD_MAP: { - STACK_SHRINK(oparg*2); - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case SETUP_ANNOTATIONS: { - break; - } - - case BUILD_CONST_KEY_MAP: { - STACK_SHRINK(oparg); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case DICT_UPDATE: { - STACK_SHRINK(1); - break; - } - - case DICT_MERGE: { - STACK_SHRINK(1); - break; - } - - case MAP_ADD: { - STACK_SHRINK(2); - break; - } - - case LOAD_SUPER_ATTR_ATTR: { - STACK_SHRINK(2); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(0)), true); - break; - } - - case LOAD_SUPER_ATTR_METHOD: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _LOAD_ATTR: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _GUARD_TYPE_VERSION: { - break; - } - - case _CHECK_MANAGED_OBJECT_HAS_VALUES: { - break; - } - - case _LOAD_ATTR_INSTANCE_VALUE: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _CHECK_ATTR_MODULE: { - break; - } - - case _LOAD_ATTR_MODULE: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 
1 : 0))), true); - break; - } - - case _CHECK_ATTR_WITH_HINT: { - break; - } - - case _LOAD_ATTR_WITH_HINT: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _LOAD_ATTR_SLOT: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _CHECK_ATTR_CLASS: { - break; - } - - case _LOAD_ATTR_CLASS: { - STACK_GROW(((oparg & 1) ? 1 : 0)); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 1 : 0))), true); - break; - } - - case _GUARD_DORV_VALUES: { - break; - } - - case _STORE_ATTR_INSTANCE_VALUE: { - STACK_SHRINK(2); - break; - } - - case _STORE_ATTR_SLOT: { - STACK_SHRINK(2); - break; - } - - case _COMPARE_OP: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case COMPARE_OP_FLOAT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case COMPARE_OP_INT: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case COMPARE_OP_STR: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case IS_OP: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CONTAINS_OP: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CHECK_EG_MATCH: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CHECK_EXC_MATCH: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _IS_NONE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case GET_LEN: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case MATCH_CLASS: { - STACK_SHRINK(2); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case MATCH_MAPPING: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case MATCH_SEQUENCE: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case MATCH_KEYS: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case GET_ITER: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case GET_YIELD_FROM_ITER: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t 
*)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _FOR_ITER_TIER_TWO: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _ITER_CHECK_LIST: { - break; - } - - case _GUARD_NOT_EXHAUSTED_LIST: { - break; - } - - case _ITER_NEXT_LIST: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _ITER_CHECK_TUPLE: { - break; - } - - case _GUARD_NOT_EXHAUSTED_TUPLE: { - break; - } - - case _ITER_NEXT_TUPLE: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _ITER_CHECK_RANGE: { - break; - } - - case _GUARD_NOT_EXHAUSTED_RANGE: { - break; - } - - case _ITER_NEXT_RANGE: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BEFORE_ASYNC_WITH: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BEFORE_WITH: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case WITH_EXCEPT_START: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case PUSH_EXC_INFO: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: { - break; - } - - case _GUARD_KEYS_VERSION: { - break; - } - - case _LOAD_ATTR_METHOD_WITH_VALUES: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _LOAD_ATTR_METHOD_NO_DICT: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(0)), true); - break; - } - - case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(0)), true); - break; - } - - case _CHECK_ATTR_METHOD_LAZY_DICT: { - break; - } - - case _LOAD_ATTR_METHOD_LAZY_DICT: { - STACK_GROW(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: { - break; - } - - case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2 - oparg)), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - 
oparg)), true); - break; - } - - case _CHECK_PEP_523: { - break; - } - - case _CHECK_FUNCTION_EXACT_ARGS: { - break; - } - - case _CHECK_STACK_SPACE: { - break; - } - - case _INIT_CALL_PY_EXACT_ARGS: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _PUSH_FRAME: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(0)), true); - break; - } - - case CALL_TYPE_1: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_STR_1: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_TUPLE_1: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case EXIT_INIT_CHECK: { - STACK_SHRINK(1); - break; - } - - case CALL_BUILTIN_CLASS: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_BUILTIN_O: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_BUILTIN_FAST: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_BUILTIN_FAST_WITH_KEYWORDS: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_LEN: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_ISINSTANCE: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_METHOD_DESCRIPTOR_O: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_METHOD_DESCRIPTOR_NOARGS: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CALL_METHOD_DESCRIPTOR_FAST: { - STACK_SHRINK(oparg); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case MAKE_FUNCTION: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case SET_FUNCTION_ATTRIBUTE: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case BUILD_SLICE: { - STACK_SHRINK(((oparg == 3) ? 
1 : 0)); - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case CONVERT_VALUE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case FORMAT_SIMPLE: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case FORMAT_WITH_SPEC: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _BINARY_OP: { - STACK_SHRINK(1); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case SWAP: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2 - (oparg-2))), true); - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); - break; - } - - case _GUARD_IS_TRUE_POP: { - STACK_SHRINK(1); - break; - } - - case _GUARD_IS_FALSE_POP: { - STACK_SHRINK(1); - break; - } - - case _GUARD_IS_NONE_POP: { - STACK_SHRINK(1); - break; - } - - case _GUARD_IS_NOT_NONE_POP: { - STACK_SHRINK(1); - break; - } - - case _JUMP_TO_TOP: { - break; - } - - case _SET_IP: { - break; - } - - case _SAVE_RETURN_OFFSET: { - break; - } - - case _EXIT_TRACE: { - break; - } - - case _INSERT: { - PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - oparg)), true); - break; - } - - case _CHECK_VALIDITY: { - break; - } diff --git a/Python/assemble.c b/Python/assemble.c index b6fb432aed4a3b..569454ebf3b9cb 100644 --- a/Python/assemble.c +++ b/Python/assemble.c @@ -4,7 +4,7 @@ #include "pycore_code.h" // write_location_entry_start() #include "pycore_compile.h" #include "pycore_opcode_utils.h" // IS_BACKWARDS_JUMP_OPCODE -#include "pycore_opcode_metadata.h" // IS_PSEUDO_INSTR, _PyOpcode_Caches +#include "pycore_opcode_metadata.h" // is_pseudo_target, _PyOpcode_Caches #define DEFAULT_CODE_SIZE 128 @@ -710,13 +710,13 @@ resolve_unconditional_jumps(instr_sequence *instrs) bool is_forward = (instr->i_oparg > i); switch(instr->i_opcode) { case JUMP: - assert(SAME_OPCODE_METADATA(JUMP, JUMP_FORWARD)); - assert(SAME_OPCODE_METADATA(JUMP, JUMP_BACKWARD)); + assert(is_pseudo_target(JUMP, JUMP_FORWARD)); + assert(is_pseudo_target(JUMP, JUMP_BACKWARD)); instr->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD; break; case JUMP_NO_INTERRUPT: - assert(SAME_OPCODE_METADATA(JUMP_NO_INTERRUPT, JUMP_FORWARD)); - assert(SAME_OPCODE_METADATA(JUMP_NO_INTERRUPT, JUMP_BACKWARD_NO_INTERRUPT)); + assert(is_pseudo_target(JUMP_NO_INTERRUPT, JUMP_FORWARD)); + assert(is_pseudo_target(JUMP_NO_INTERRUPT, JUMP_BACKWARD_NO_INTERRUPT)); instr->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT; break; diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 19e2268046fcdc..29e1dab184ef4e 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -330,14 +330,14 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION */ } - op(_TO_BOOL, (unused/2, value -- res)) { + op(_TO_BOOL, (value -- res)) { int err = PyObject_IsTrue(value); DECREF_INPUTS(); ERROR_IF(err < 0, error); res = err ? 
Py_True : Py_False; } - macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + _TO_BOOL; + macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + unused/2 + _TO_BOOL; inst(TO_BOOL_BOOL, (unused/1, unused/2, value -- value)) { DEOPT_IF(!PyBool_Check(value)); @@ -416,7 +416,7 @@ dummy_func( DEOPT_IF(!PyLong_CheckExact(right)); } - op(_BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- res)) { + op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -424,7 +424,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - op(_BINARY_OP_ADD_INT, (unused/1, left, right -- res)) { + op(_BINARY_OP_ADD_INT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -432,7 +432,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - op(_BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- res)) { + op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -441,18 +441,18 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_INT) = - _GUARD_BOTH_INT + _BINARY_OP_MULTIPLY_INT; + _GUARD_BOTH_INT + unused/1 + _BINARY_OP_MULTIPLY_INT; macro(BINARY_OP_ADD_INT) = - _GUARD_BOTH_INT + _BINARY_OP_ADD_INT; + _GUARD_BOTH_INT + unused/1 + _BINARY_OP_ADD_INT; macro(BINARY_OP_SUBTRACT_INT) = - _GUARD_BOTH_INT + _BINARY_OP_SUBTRACT_INT; + _GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT; op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) { DEOPT_IF(!PyFloat_CheckExact(left)); DEOPT_IF(!PyFloat_CheckExact(right)); } - op(_BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- res)) { + op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval * @@ -460,7 +460,7 @@ dummy_func( DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); } - op(_BINARY_OP_ADD_FLOAT, (unused/1, left, right -- res)) { + op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval + @@ -468,7 +468,7 @@ dummy_func( DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); } - op(_BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- res)) { + op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval - @@ -477,18 +477,18 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_FLOAT) = - _GUARD_BOTH_FLOAT + _BINARY_OP_MULTIPLY_FLOAT; + _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_MULTIPLY_FLOAT; macro(BINARY_OP_ADD_FLOAT) = - _GUARD_BOTH_FLOAT + _BINARY_OP_ADD_FLOAT; + _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_ADD_FLOAT; macro(BINARY_OP_SUBTRACT_FLOAT) = - _GUARD_BOTH_FLOAT + _BINARY_OP_SUBTRACT_FLOAT; + _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT; op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) { DEOPT_IF(!PyUnicode_CheckExact(left)); DEOPT_IF(!PyUnicode_CheckExact(right)); } - op(_BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) { + op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) { STAT_INC(BINARY_OP, hit); res = PyUnicode_Concat(left, right); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); @@ -497,7 +497,7 @@ dummy_func( } macro(BINARY_OP_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + _BINARY_OP_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_ADD_UNICODE; // This is a subtle one. 
It's a super-instruction for // BINARY_OP_ADD_UNICODE followed by STORE_FAST @@ -505,7 +505,7 @@ dummy_func( // So the inputs are the same as for all BINARY_OP // specializations, but there is no output. // At the end we just skip over the STORE_FAST. - op(_BINARY_OP_INPLACE_ADD_UNICODE, (unused/1, left, right --)) { + op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { TIER_ONE_ONLY assert(next_instr->op.code == STORE_FAST); PyObject **target_local = &GETLOCAL(next_instr->op.arg); @@ -533,7 +533,7 @@ dummy_func( } macro(BINARY_OP_INPLACE_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + _BINARY_OP_INPLACE_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_INPLACE_ADD_UNICODE; family(BINARY_SUBSCR, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = { BINARY_SUBSCR_DICT, @@ -1100,7 +1100,7 @@ dummy_func( inst(POP_EXCEPT, (exc_value -- )) { _PyErr_StackItem *exc_info = tstate->exc_info; - Py_XSETREF(exc_info->exc_value, exc_value); + Py_XSETREF(exc_info->exc_value, exc_value == Py_None ? NULL : exc_value); } inst(RERAISE, (values[oparg], exc -- values[oparg])) { @@ -1295,14 +1295,14 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION */ } - op(_STORE_ATTR, (unused/3, v, owner --)) { + op(_STORE_ATTR, (v, owner --)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyObject_SetAttr(owner, name, v); DECREF_INPUTS(); ERROR_IF(err, error); } - macro(STORE_ATTR) = _SPECIALIZE_STORE_ATTR + _STORE_ATTR; + macro(STORE_ATTR) = _SPECIALIZE_STORE_ATTR + unused/3 + _STORE_ATTR; inst(DELETE_ATTR, (owner --)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); @@ -1414,7 +1414,7 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION */ } - op(_LOAD_GLOBAL, (unused/1, unused/1, unused/1 -- res, null if (oparg & 1))) { + op(_LOAD_GLOBAL, ( -- res, null if (oparg & 1))) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) @@ -1451,7 +1451,12 @@ dummy_func( null = NULL; } - macro(LOAD_GLOBAL) = _SPECIALIZE_LOAD_GLOBAL + _LOAD_GLOBAL; + macro(LOAD_GLOBAL) = + _SPECIALIZE_LOAD_GLOBAL + + counter/1 + + globals_version/1 + + builtins_version/1 + + _LOAD_GLOBAL; op(_GUARD_GLOBALS_VERSION, (version/1 --)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); @@ -1853,7 +1858,7 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION */ } - op(_LOAD_ATTR, (unused/8, owner -- attr, self_or_null if (oparg & 1))) { + op(_LOAD_ATTR, (owner -- attr, self_or_null if (oparg & 1))) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); if (oparg & 1) { /* Designed to work in tandem with CALL, pushes two values. 
*/ @@ -1886,7 +1891,10 @@ dummy_func( } } - macro(LOAD_ATTR) = _SPECIALIZE_LOAD_ATTR + _LOAD_ATTR; + macro(LOAD_ATTR) = + _SPECIALIZE_LOAD_ATTR + + unused/8 + + _LOAD_ATTR; pseudo(LOAD_METHOD) = { LOAD_ATTR, @@ -2369,7 +2377,7 @@ dummy_func( stack_pointer = _PyFrame_GetStackPointer(frame); } - replaced op(_POP_JUMP_IF_FALSE, (unused/1, cond -- )) { + replaced op(_POP_JUMP_IF_FALSE, (cond -- )) { assert(PyBool_Check(cond)); int flag = Py_IsFalse(cond); #if ENABLE_SPECIALIZATION @@ -2378,7 +2386,7 @@ dummy_func( JUMPBY(oparg * flag); } - replaced op(_POP_JUMP_IF_TRUE, (unused/1, cond -- )) { + replaced op(_POP_JUMP_IF_TRUE, (cond -- )) { assert(PyBool_Check(cond)); int flag = Py_IsTrue(cond); #if ENABLE_SPECIALIZATION @@ -2397,13 +2405,13 @@ dummy_func( } } - macro(POP_JUMP_IF_TRUE) = _POP_JUMP_IF_TRUE; + macro(POP_JUMP_IF_TRUE) = unused/1 + _POP_JUMP_IF_TRUE; - macro(POP_JUMP_IF_FALSE) = _POP_JUMP_IF_FALSE; + macro(POP_JUMP_IF_FALSE) = unused/1 + _POP_JUMP_IF_FALSE; - macro(POP_JUMP_IF_NONE) = _IS_NONE + _POP_JUMP_IF_TRUE; + macro(POP_JUMP_IF_NONE) = unused/1 + _IS_NONE + _POP_JUMP_IF_TRUE; - macro(POP_JUMP_IF_NOT_NONE) = _IS_NONE + _POP_JUMP_IF_FALSE; + macro(POP_JUMP_IF_NOT_NONE) = unused/1 + _IS_NONE + _POP_JUMP_IF_FALSE; inst(JUMP_BACKWARD_NO_INTERRUPT, (--)) { TIER_ONE_ONLY @@ -3010,7 +3018,7 @@ dummy_func( } // When calling Python, inline the call using DISPATCH_INLINED(). - op(_CALL, (unused/2, callable, self_or_null, args[oparg] -- res)) { + op(_CALL, (callable, self_or_null, args[oparg] -- res)) { // oparg counts all of the args, but *not* self: int total_args = oparg; if (self_or_null != NULL) { @@ -3079,7 +3087,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - macro(CALL) = _SPECIALIZE_CALL + _CALL; + macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL; op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable, null, unused[oparg])) { DEOPT_IF(null != NULL); diff --git a/Python/ceval.c b/Python/ceval.c index 27304d31e27949..1fea9747488102 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -24,6 +24,7 @@ #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_typeobject.h" // _PySuper_Lookup() +#include "pycore_uop_ids.h" // Uops #include "pycore_uops.h" // _PyUOpExecutorObject #include "pycore_pyerrors.h" diff --git a/Python/compile.c b/Python/compile.c index 8b9e2f02048f11..65ac05ad58d4dd 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -796,35 +796,12 @@ stack_effect(int opcode, int oparg, int jump) // Specialized instructions are not supported. 
return PY_INVALID_STACK_EFFECT; } - int popped, pushed; - if (jump > 0) { - popped = _PyOpcode_num_popped(opcode, oparg, true); - pushed = _PyOpcode_num_pushed(opcode, oparg, true); - } - else { - popped = _PyOpcode_num_popped(opcode, oparg, false); - pushed = _PyOpcode_num_pushed(opcode, oparg, false); - } + int popped = _PyOpcode_num_popped(opcode, oparg); + int pushed = _PyOpcode_num_pushed(opcode, oparg); if (popped < 0 || pushed < 0) { return PY_INVALID_STACK_EFFECT; } - if (jump >= 0) { - return pushed - popped; - } - if (jump < 0) { - // Compute max(pushed - popped, alt_pushed - alt_popped) - int alt_popped = _PyOpcode_num_popped(opcode, oparg, true); - int alt_pushed = _PyOpcode_num_pushed(opcode, oparg, true); - if (alt_popped < 0 || alt_pushed < 0) { - return PY_INVALID_STACK_EFFECT; - } - int diff = pushed - popped; - int alt_diff = alt_pushed - alt_popped; - if (alt_diff > diff) { - return alt_diff; - } - return diff; - } + return pushed - popped; } // Pseudo ops @@ -1125,7 +1102,7 @@ compiler_addop_name(struct compiler_unit *u, location loc, arg <<= 1; } if (opcode == LOAD_METHOD) { - assert(SAME_OPCODE_METADATA(LOAD_METHOD, LOAD_ATTR)); + assert(is_pseudo_target(LOAD_METHOD, LOAD_ATTR)); opcode = LOAD_ATTR; arg <<= 1; arg |= 1; @@ -1135,18 +1112,18 @@ compiler_addop_name(struct compiler_unit *u, location loc, arg |= 2; } if (opcode == LOAD_SUPER_METHOD) { - assert(SAME_OPCODE_METADATA(LOAD_SUPER_METHOD, LOAD_SUPER_ATTR)); + assert(is_pseudo_target(LOAD_SUPER_METHOD, LOAD_SUPER_ATTR)); opcode = LOAD_SUPER_ATTR; arg <<= 2; arg |= 3; } if (opcode == LOAD_ZERO_SUPER_ATTR) { - assert(SAME_OPCODE_METADATA(LOAD_ZERO_SUPER_ATTR, LOAD_SUPER_ATTR)); + assert(is_pseudo_target(LOAD_ZERO_SUPER_ATTR, LOAD_SUPER_ATTR)); opcode = LOAD_SUPER_ATTR; arg <<= 2; } if (opcode == LOAD_ZERO_SUPER_METHOD) { - assert(SAME_OPCODE_METADATA(LOAD_ZERO_SUPER_METHOD, LOAD_SUPER_ATTR)); + assert(is_pseudo_target(LOAD_ZERO_SUPER_METHOD, LOAD_SUPER_ATTR)); opcode = LOAD_SUPER_ATTR; arg <<= 2; arg |= 1; diff --git a/Python/errors.c b/Python/errors.c index ed5eec5c261970..e5f176a5dd208e 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -121,11 +121,11 @@ _PyErr_GetTopmostException(PyThreadState *tstate) _PyErr_StackItem *exc_info = tstate->exc_info; assert(exc_info); - while ((exc_info->exc_value == NULL || exc_info->exc_value == Py_None) && - exc_info->previous_item != NULL) + while (exc_info->exc_value == NULL && exc_info->previous_item != NULL) { exc_info = exc_info->previous_item; } + assert(!Py_IsNone(exc_info->exc_value)); return exc_info; } @@ -592,7 +592,7 @@ PyErr_GetHandledException(void) void _PyErr_SetHandledException(PyThreadState *tstate, PyObject *exc) { - Py_XSETREF(tstate->exc_info->exc_value, Py_XNewRef(exc)); + Py_XSETREF(tstate->exc_info->exc_value, Py_XNewRef(exc == Py_None ? NULL : exc)); } void diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 7cc29c8e644d8d..14fb3a05a9f674 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -806,7 +806,7 @@ PyObject *exc_value; exc_value = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; - Py_XSETREF(exc_info->exc_value, exc_value); + Py_XSETREF(exc_info->exc_value, exc_value == Py_None ? 
NULL : exc_value); stack_pointer += -1; break; } @@ -1127,7 +1127,7 @@ null = NULL; stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); break; } @@ -1163,7 +1163,7 @@ null = NULL; stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); break; } @@ -1181,7 +1181,7 @@ null = NULL; stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); break; } @@ -1534,7 +1534,7 @@ Py_DECREF(self); if (attr == NULL) goto pop_3_error_tier_two; stack_pointer[-3] = attr; - stack_pointer += -2 + (((0) ? 1 : 0)); + stack_pointer += -2 + ((0) ? 1 : 0); break; } @@ -1613,7 +1613,7 @@ } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = self_or_null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -1653,7 +1653,7 @@ Py_DECREF(owner); stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -1687,7 +1687,7 @@ Py_DECREF(owner); stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -1731,7 +1731,7 @@ Py_DECREF(owner); stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -1751,7 +1751,7 @@ Py_DECREF(owner); stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -1779,7 +1779,7 @@ Py_DECREF(owner); stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); break; } @@ -2468,7 +2468,7 @@ self = owner; stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); break; } @@ -2488,7 +2488,7 @@ self = owner; stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); break; } @@ -2504,7 +2504,7 @@ Py_DECREF(owner); attr = Py_NewRef(descr); stack_pointer[-1] = attr; - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); break; } @@ -2521,7 +2521,7 @@ Py_DECREF(owner); attr = Py_NewRef(descr); stack_pointer[-1] = attr; - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); break; } @@ -2551,7 +2551,7 @@ self = owner; stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); break; } @@ -2663,7 +2663,7 @@ goto exit_unwind; } #endif - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); break; } @@ -3199,16 +3199,16 @@ PyObject *start; PyObject *slice; oparg = CURRENT_OPARG(); - if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } - stop = stack_pointer[-1 - (((oparg == 3) ? 1 : 0))]; - start = stack_pointer[-2 - (((oparg == 3) ? 1 : 0))]; + if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; } + stop = stack_pointer[-1 - ((oparg == 3) ? 1 : 0)]; + start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)]; slice = PySlice_New(start, stop, step); Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - if (slice == NULL) { stack_pointer += -2 - (((oparg == 3) ? 1 : 0)); goto error_tier_two; } - stack_pointer[-2 - (((oparg == 3) ? 
1 : 0))] = slice; - stack_pointer += -1 - (((oparg == 3) ? 1 : 0)); + if (slice == NULL) { stack_pointer += -2 - ((oparg == 3) ? 1 : 0); goto error_tier_two; } + stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; + stack_pointer += -1 - ((oparg == 3) ? 1 : 0); break; } diff --git a/Python/flowgraph.c b/Python/flowgraph.c index d2e3a7ae441c7f..0e6ffbc32e1526 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -648,7 +648,7 @@ mark_except_handlers(basicblock *entryblock) { struct _PyCfgExceptStack { - basicblock *handlers[CO_MAXBLOCKS+1]; + basicblock *handlers[CO_MAXBLOCKS+2]; int depth; }; @@ -661,6 +661,7 @@ push_except_block(struct _PyCfgExceptStack *stack, cfg_instr *setup) { if (opcode == SETUP_WITH || opcode == SETUP_CLEANUP) { target->b_preserve_lasti = 1; } + assert(stack->depth <= CO_MAXBLOCKS); stack->handlers[++stack->depth] = target; return target; } @@ -2258,11 +2259,11 @@ convert_pseudo_ops(basicblock *entryblock) INSTR_SET_OP0(instr, NOP); } else if (instr->i_opcode == LOAD_CLOSURE) { - assert(SAME_OPCODE_METADATA(LOAD_CLOSURE, LOAD_FAST)); + assert(is_pseudo_target(LOAD_CLOSURE, LOAD_FAST)); instr->i_opcode = LOAD_FAST; } else if (instr->i_opcode == STORE_FAST_MAYBE_NULL) { - assert(SAME_OPCODE_METADATA(STORE_FAST_MAYBE_NULL, STORE_FAST)); + assert(is_pseudo_target(STORE_FAST_MAYBE_NULL, STORE_FAST)); instr->i_opcode = STORE_FAST; } } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index a274427a699a43..ce31967b7912d7 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -153,6 +153,7 @@ DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_ADD_FLOAT { STAT_INC(BINARY_OP, hit); @@ -181,6 +182,7 @@ DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_ADD_INT { STAT_INC(BINARY_OP, hit); @@ -209,6 +211,7 @@ DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_ADD_UNICODE { STAT_INC(BINARY_OP, hit); @@ -236,6 +239,7 @@ DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_INPLACE_ADD_UNICODE { TIER_ONE_ONLY @@ -282,6 +286,7 @@ DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_MULTIPLY_FLOAT { STAT_INC(BINARY_OP, hit); @@ -310,6 +315,7 @@ DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_MULTIPLY_INT { STAT_INC(BINARY_OP, hit); @@ -338,6 +344,7 @@ DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_SUBTRACT_FLOAT { STAT_INC(BINARY_OP, hit); @@ -366,6 +373,7 @@ DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); } + /* Skip 1 cache entry */ // _BINARY_OP_SUBTRACT_INT { STAT_INC(BINARY_OP, hit); @@ -683,16 +691,16 @@ PyObject *stop; PyObject *start; PyObject *slice; - if (oparg == 3) { step = stack_pointer[-(((oparg == 3) ? 1 : 0))]; } - stop = stack_pointer[-1 - (((oparg == 3) ? 1 : 0))]; - start = stack_pointer[-2 - (((oparg == 3) ? 1 : 0))]; + if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; } + stop = stack_pointer[-1 - ((oparg == 3) ? 
1 : 0)]; + start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)]; slice = PySlice_New(start, stop, step); Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - if (slice == NULL) { stack_pointer += -2 - (((oparg == 3) ? 1 : 0)); goto error; } - stack_pointer[-2 - (((oparg == 3) ? 1 : 0))] = slice; - stack_pointer += -1 - (((oparg == 3) ? 1 : 0)); + if (slice == NULL) { stack_pointer += -2 - ((oparg == 3) ? 1 : 0); goto error; } + stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; + stack_pointer += -1 - ((oparg == 3) ? 1 : 0); DISPATCH(); } @@ -763,6 +771,7 @@ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); #endif /* ENABLE_SPECIALIZATION */ } + /* Skip 2 cache entries */ // _CALL { // oparg counts all of the args, but *not* self: @@ -995,7 +1004,7 @@ } #endif } - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); DISPATCH(); } @@ -1176,9 +1185,9 @@ PyObject *callargs; PyObject *func; PyObject *result; - if (oparg & 1) { kwargs = stack_pointer[-((oparg & 1))]; } - callargs = stack_pointer[-1 - ((oparg & 1))]; - func = stack_pointer[-3 - ((oparg & 1))]; + if (oparg & 1) { kwargs = stack_pointer[-(oparg & 1)]; } + callargs = stack_pointer[-1 - (oparg & 1)]; + func = stack_pointer[-3 - (oparg & 1)]; // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. assert(kwargs == NULL || PyDict_CheckExact(kwargs)); @@ -1244,9 +1253,9 @@ Py_DECREF(callargs); Py_XDECREF(kwargs); assert(PEEK(2 + (oparg & 1)) == NULL); - if (result == NULL) { stack_pointer += -3 - ((oparg & 1)); goto error; } - stack_pointer[-3 - ((oparg & 1))] = result; - stack_pointer += -2 - ((oparg & 1)); + if (result == NULL) { stack_pointer += -3 - (oparg & 1); goto error; } + stack_pointer[-3 - (oparg & 1)] = result; + stack_pointer += -2 - (oparg & 1); CHECK_EVAL_BREAKER(); DISPATCH(); } @@ -1745,7 +1754,7 @@ } #endif } - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); DISPATCH(); } @@ -3400,6 +3409,7 @@ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); #endif /* ENABLE_SPECIALIZATION */ } + /* Skip 8 cache entries */ // _LOAD_ATTR { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); @@ -3435,7 +3445,7 @@ } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = self_or_null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -3468,7 +3478,7 @@ } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -3545,7 +3555,7 @@ /* Skip 5 cache entries */ stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -3587,7 +3597,7 @@ } stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); DISPATCH(); } @@ -3622,7 +3632,7 @@ } stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); DISPATCH(); } @@ -3669,7 +3679,7 @@ } stack_pointer[-1] = attr; if (1) stack_pointer[0] = self; - stack_pointer += (((1) ? 1 : 0)); + stack_pointer += ((1) ? 1 : 0); DISPATCH(); } @@ -3708,7 +3718,7 @@ /* Skip 5 cache entries */ stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -3740,7 +3750,7 @@ attr = Py_NewRef(descr); } stack_pointer[-1] = attr; - stack_pointer += (((0) ? 
1 : 0)); + stack_pointer += ((0) ? 1 : 0); DISPATCH(); } @@ -3783,7 +3793,7 @@ attr = Py_NewRef(descr); } stack_pointer[-1] = attr; - stack_pointer += (((0) ? 1 : 0)); + stack_pointer += ((0) ? 1 : 0); DISPATCH(); } @@ -3851,7 +3861,7 @@ /* Skip 5 cache entries */ stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -3907,7 +3917,7 @@ /* Skip 5 cache entries */ stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; - stack_pointer += ((oparg & 1)); + stack_pointer += (oparg & 1); DISPATCH(); } @@ -4096,6 +4106,9 @@ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); #endif /* ENABLE_SPECIALIZATION */ } + /* Skip 1 cache entry */ + /* Skip 1 cache entry */ + /* Skip 1 cache entry */ // _LOAD_GLOBAL { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); @@ -4135,7 +4148,7 @@ } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); DISPATCH(); } @@ -4176,7 +4189,7 @@ } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); DISPATCH(); } @@ -4210,7 +4223,7 @@ } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; - stack_pointer += 1 + ((oparg & 1)); + stack_pointer += 1 + (oparg & 1); DISPATCH(); } @@ -4338,7 +4351,7 @@ } stack_pointer[-3] = attr; if (oparg & 1) stack_pointer[-2] = null; - stack_pointer += -2 + ((oparg & 1)); + stack_pointer += -2 + (oparg & 1); DISPATCH(); } @@ -4366,7 +4379,7 @@ Py_DECREF(self); if (attr == NULL) goto pop_3_error; stack_pointer[-3] = attr; - stack_pointer += -2 + (((0) ? 1 : 0)); + stack_pointer += -2 + ((0) ? 1 : 0); DISPATCH(); } @@ -4554,7 +4567,7 @@ PyObject *exc_value; exc_value = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; - Py_XSETREF(exc_info->exc_value, exc_value); + Py_XSETREF(exc_info->exc_value, exc_value == Py_None ? 
NULL : exc_value); stack_pointer += -1; DISPATCH(); } @@ -4564,6 +4577,7 @@ next_instr += 2; INSTRUCTION_STATS(POP_JUMP_IF_FALSE); PyObject *cond; + /* Skip 1 cache entry */ cond = stack_pointer[-1]; assert(PyBool_Check(cond)); int flag = Py_IsFalse(cond); @@ -4582,6 +4596,7 @@ PyObject *value; PyObject *b; PyObject *cond; + /* Skip 1 cache entry */ // _IS_NONE value = stack_pointer[-1]; { @@ -4614,6 +4629,7 @@ PyObject *value; PyObject *b; PyObject *cond; + /* Skip 1 cache entry */ // _IS_NONE value = stack_pointer[-1]; { @@ -4644,6 +4660,7 @@ next_instr += 2; INSTRUCTION_STATS(POP_JUMP_IF_TRUE); PyObject *cond; + /* Skip 1 cache entry */ cond = stack_pointer[-1]; assert(PyBool_Check(cond)); int flag = Py_IsTrue(cond); @@ -5117,6 +5134,7 @@ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); #endif /* ENABLE_SPECIALIZATION */ } + /* Skip 3 cache entries */ // _STORE_ATTR v = stack_pointer[-2]; { @@ -5509,6 +5527,7 @@ DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache); #endif /* ENABLE_SPECIALIZATION */ } + /* Skip 2 cache entries */ // _TO_BOOL { int err = PyObject_IsTrue(value); diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index bcd6ea7564f9b3..e664e638bdb749 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -254,4 +254,5 @@ static void *opcode_targets[256] = { &&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE, &&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE, &&TARGET_INSTRUMENTED_LINE, - &&_unknown_opcode}; + &&_unknown_opcode, +}; diff --git a/Python/optimizer.c b/Python/optimizer.c index d44e733bc346fa..f27af14d967cd3 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -6,14 +6,20 @@ #include "pycore_opcode_utils.h" // MAX_REAL_OPCODE #include "pycore_optimizer.h" // _Py_uop_analyze_and_optimize() #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_uop_ids.h" #include "pycore_uops.h" #include "cpython/optimizer.h" #include #include #include +#define NEED_OPCODE_METADATA +#include "pycore_uop_metadata.h" // Uop tables +#undef NEED_OPCODE_METADATA + #define MAX_EXECUTORS_SIZE 256 + static bool has_space_for_executor(PyCodeObject *code, _Py_CODEUNIT *instr) { @@ -327,9 +333,6 @@ uop_dealloc(_PyUOpExecutorObject *self) { const char * _PyUOpName(int index) { - if (index <= MAX_REAL_OPCODE) { - return _PyOpcode_OpName[index]; - } return _PyOpcode_uop_name[index]; } @@ -388,7 +391,7 @@ PyTypeObject _PyUOpExecutor_Type = { /* TO DO -- Generate these tables */ static const uint16_t -_PyUOp_Replacements[OPCODE_METADATA_SIZE] = { +_PyUOp_Replacements[MAX_UOP_ID + 1] = { [_ITER_JUMP_RANGE] = _GUARD_NOT_EXHAUSTED_RANGE, [_ITER_JUMP_LIST] = _GUARD_NOT_EXHAUSTED_LIST, [_ITER_JUMP_TUPLE] = _GUARD_NOT_EXHAUSTED_TUPLE, @@ -515,11 +518,11 @@ translate_bytecode_to_trace( uint32_t opcode = instr->op.code; uint32_t oparg = instr->op.arg; - uint32_t extras = 0; + uint32_t extended = 0; if (opcode == EXTENDED_ARG) { instr++; - extras += 1; + extended = 1; opcode = instr->op.code; oparg = (oparg << 8) | instr->op.arg; if (opcode == EXTENDED_ARG) { @@ -574,6 +577,7 @@ translate_bytecode_to_trace( } case JUMP_BACKWARD: + case JUMP_BACKWARD_NO_INTERRUPT: { if (instr + 2 - oparg == initial_instr && code == initial_code) { RESERVE(1); @@ -620,23 +624,7 @@ translate_bytecode_to_trace( int offset = expansion->uops[i].offset + 1; switch (expansion->uops[i].size) { case OPARG_FULL: - if (extras && OPCODE_HAS_JUMP(opcode)) { - if (opcode == JUMP_BACKWARD_NO_INTERRUPT) { - oparg -= extras; - } - else { - assert(opcode != JUMP_BACKWARD); - oparg += extras; - } - } - if 
(_PyUOp_Replacements[uop]) { - uop = _PyUOp_Replacements[uop]; - if (uop == _FOR_ITER_TIER_TWO) { - target += 1 + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1; - assert(_PyCode_CODE(code)[target-1].op.code == END_FOR || - _PyCode_CODE(code)[target-1].op.code == INSTRUMENTED_END_FOR); - } - } + assert(opcode != JUMP_BACKWARD_NO_INTERRUPT && opcode != JUMP_BACKWARD); break; case OPARG_CACHE_1: operand = read_u16(&instr[offset].cache); @@ -657,7 +645,15 @@ translate_bytecode_to_trace( oparg = offset; assert(uop == _SAVE_RETURN_OFFSET); break; - + case OPARG_REPLACED: + uop = _PyUOp_Replacements[uop]; + assert(uop != 0); + if (uop == _FOR_ITER_TIER_TWO) { + target += 1 + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1 + extended; + assert(_PyCode_CODE(code)[target-1].op.code == END_FOR || + _PyCode_CODE(code)[target-1].op.code == INSTRUMENTED_END_FOR); + } + break; default: fprintf(stderr, "opcode=%d, oparg=%d; nuops=%d, i=%d; size=%d, offset=%d\n", @@ -799,7 +795,8 @@ compute_used(_PyUOpInstruction *buffer, uint32_t *used) } /* All other micro-ops fall through, so i+1 is reachable */ SET_BIT(used, i+1); - if (OPCODE_HAS_JUMP(opcode)) { + assert(opcode <= MAX_UOP_ID); + if (_PyUop_Flags[opcode] & HAS_JUMP_FLAG) { /* Mark target as reachable */ SET_BIT(used, buffer[i].oparg); } diff --git a/Python/perf_trampoline.c b/Python/perf_trampoline.c index 540b650192ed34..750ba18d3510ed 100644 --- a/Python/perf_trampoline.c +++ b/Python/perf_trampoline.c @@ -247,7 +247,7 @@ new_code_arena(void) mem_size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, // fd (not used here) 0); // offset (not used here) - if (!memory) { + if (memory == MAP_FAILED) { PyErr_SetFromErrno(PyExc_OSError); PyErr_FormatUnraisable("Failed to create new mmap for perf trampoline"); perf_status = PERF_STATUS_FAILED; diff --git a/Python/specialize.c b/Python/specialize.c index 7c2a4a42b1dcc3..369b962a545f4e 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -10,6 +10,7 @@ #include "pycore_moduleobject.h" #include "pycore_object.h" #include "pycore_opcode_metadata.h" // _PyOpcode_Caches +#include "pycore_uop_metadata.h" // _PyOpcode_uop_name #include "pycore_opcode_utils.h" // RESUME_AT_FUNC_START #include "pycore_pylifecycle.h" // _PyOS_URandomNonblock() #include "pycore_runtime.h" // _Py_ID() diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index 218c64e13374e6..05633e3f77af49 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -21,7 +21,7 @@ verbose = False -# This must be kept in sync with Tools/cases_generator/generate_cases.py +# This must be kept in sync with Tools/cases_generator/analyzer.py RESUME = 149 def isprintable(b: bytes) -> bool: diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index c02eb88b46532f..93d0d8a3762df3 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -1,12 +1,16 @@ """Tool for generating Software Bill of Materials (SBOM) for Python's dependencies""" - +import os import re import hashlib import json import glob import pathlib import subprocess +import sys import typing +from urllib.request import urlopen + +CPYTHON_ROOT_DIR = pathlib.Path(__file__).parent.parent.parent # Before adding a new entry to this list, double check that # the license expression is a valid SPDX license expression: @@ -43,15 +47,14 @@ class PackageFiles(typing.NamedTuple): # values to 'exclude' if we create new files within tracked # directories that aren't sourced from third-party packages. 
PACKAGE_TO_FILES = { + # NOTE: pip's entry in this structure is automatically generated in + # the 'discover_pip_sbom_package()' function below. "mpdecimal": PackageFiles( include=["Modules/_decimal/libmpdec/**"] ), "expat": PackageFiles( include=["Modules/expat/**"] ), - "pip": PackageFiles( - include=["Lib/ensurepip/_bundled/pip-23.3.2-py3-none-any.whl"] - ), "macholib": PackageFiles( include=["Lib/ctypes/macholib/**"], exclude=[ @@ -106,13 +109,106 @@ def filter_gitignored_paths(paths: list[str]) -> list[str]: return sorted([line.split()[-1] for line in git_check_ignore_lines if line.startswith("::")]) +def discover_pip_sbom_package(sbom_data: dict[str, typing.Any]) -> None: + """pip is itself a Python package, so its metadata (version, checksums, + and download URL) can be discovered automatically from the bundled wheel + and PyPI instead of being maintained by hand. + """ + global PACKAGE_TO_FILES + + ensurepip_bundled_dir = CPYTHON_ROOT_DIR / "Lib/ensurepip/_bundled" + pip_wheels = [] + + # Find the single pip wheel that should be in the bundled directory. + for wheel_filename in os.listdir(ensurepip_bundled_dir): + if wheel_filename.startswith("pip-"): + pip_wheels.append(wheel_filename) + if len(pip_wheels) != 1: + print("Zero or multiple pip wheels detected in 'Lib/ensurepip/_bundled'") + sys.exit(1) + pip_wheel_filename = pip_wheels[0] + + # Add the wheel filename to the list of files so the SBOM file + # and relationship generator process the wheel as well. + PACKAGE_TO_FILES["pip"] = PackageFiles( + include=[f"Lib/ensurepip/_bundled/{pip_wheel_filename}"] + ) + + # The wheel filename format puts the version right after the project name. + pip_version = pip_wheel_filename.split("-")[1] + pip_checksum_sha256 = hashlib.sha256( + (ensurepip_bundled_dir / pip_wheel_filename).read_bytes() + ).hexdigest() + + # Get pip's download location from PyPI. Check that the checksum is correct too. + try: + raw_text = urlopen(f"https://pypi.org/pypi/pip/{pip_version}/json").read() + pip_release_metadata = json.loads(raw_text) + url: dict[str, typing.Any] + + # Look for a matching artifact filename and then compare + # its remote checksum with the local one. + for url in pip_release_metadata["urls"]: + if url["filename"] == pip_wheel_filename: + break + else: + raise ValueError(f"No matching filename on PyPI for '{pip_wheel_filename}'") + if url["digests"]["sha256"] != pip_checksum_sha256: + raise ValueError("Local pip checksum doesn't match artifact on PyPI") + + # Successfully found the download URL for the matching artifact. + pip_download_url = url["url"] + + except (OSError, ValueError) as e: + print(f"Couldn't fetch pip's metadata from PyPI: {e}") + sys.exit(1) + + # Remove pip from the existing SBOM packages if it's there + # and then overwrite its entry with our own generated one.
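+ # (Illustrative example, continuing the note above: for the previously
+ # bundled wheel 'pip-23.3.2-py3-none-any.whl' the generated entry would
+ # carry versionInfo "23.3.2", the purl "pkg:pypi/pip@23.3.2", and the CPE
+ # "cpe:2.3:a:pypa:pip:23.3.2:*:*:*:*:*:*:*".)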
+ sbom_data["packages"] = [ + sbom_package + for sbom_package in sbom_data["packages"] + if sbom_package["name"] != "pip" + ] + sbom_data["packages"].append( + { + "SPDXID": spdx_id("SPDXRef-PACKAGE-pip"), + "name": "pip", + "versionInfo": pip_version, + "originator": "Organization: Python Packaging Authority", + "licenseConcluded": "MIT", + "downloadLocation": pip_download_url, + "checksums": [ + {"algorithm": "SHA256", "checksumValue": pip_checksum_sha256} + ], + "externalRefs": [ + { + "referenceCategory": "SECURITY", + "referenceLocator": f"cpe:2.3:a:pypa:pip:{pip_version}:*:*:*:*:*:*:*", + "referenceType": "cpe23Type", + }, + { + "referenceCategory": "PACKAGE_MANAGER", + "referenceLocator": f"pkg:pypi/pip@{pip_version}", + "referenceType": "purl", + }, + ], + "primaryPackagePurpose": "SOURCE", + } + ) + + def main() -> None: - root_dir = pathlib.Path(__file__).parent.parent.parent - sbom_path = root_dir / "Misc/sbom.spdx.json" + sbom_path = CPYTHON_ROOT_DIR / "Misc/sbom.spdx.json" sbom_data = json.loads(sbom_path.read_bytes()) - # Make a bunch of assertions about the SBOM data to ensure it's consistent. + # Insert pip's SBOM metadata from the wheel. + discover_pip_sbom_package(sbom_data) + + # Ensure all packages in this tool are represented also in the SBOM file. assert {package["name"] for package in sbom_data["packages"]} == set(PACKAGE_TO_FILES) + + # Make a bunch of assertions about the SBOM data to ensure it's consistent. for package in sbom_data["packages"]: # Properties and ID must be properly formed. @@ -138,17 +234,17 @@ def main() -> None: for include in sorted(files.include): # Find all the paths and then filter them through .gitignore. - paths = glob.glob(include, root_dir=root_dir, recursive=True) + paths = glob.glob(include, root_dir=CPYTHON_ROOT_DIR, recursive=True) paths = filter_gitignored_paths(paths) assert paths, include # Make sure that every value returns something! for path in paths: # Skip directories and excluded files - if not (root_dir / path).is_file() or path in exclude: + if not (CPYTHON_ROOT_DIR / path).is_file() or path in exclude: continue # SPDX requires SHA1 to be used for files, but we provide SHA256 too. 
- data = (root_dir / path).read_bytes() + data = (CPYTHON_ROOT_DIR / path).read_bytes() checksum_sha1 = hashlib.sha1(data).hexdigest() checksum_sha256 = hashlib.sha256(data).hexdigest() diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 04388fb54caa6c..239ed4e0266a75 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -84,7 +84,6 @@ def clean_lines(text): Python/frozen_modules/*.h Python/generated_cases.c.h Python/executor_cases.c.h -Python/abstract_interp_cases.c.h # not actually source Python/bytecodes.c diff --git a/Tools/cases_generator/analysis.py b/Tools/cases_generator/analysis.py deleted file mode 100644 index 26d92c13cd82ab..00000000000000 --- a/Tools/cases_generator/analysis.py +++ /dev/null @@ -1,487 +0,0 @@ -import re -import sys -import typing - -from _typing_backports import assert_never -from flags import InstructionFlags, variable_used -from formatting import prettify_filename, UNUSED -from instructions import ( - ActiveCacheEffect, - Component, - Instruction, - InstructionOrCacheEffect, - MacroInstruction, - MacroParts, - PseudoInstruction, -) -import parsing -from parsing import StackEffect - -BEGIN_MARKER = "// BEGIN BYTECODES //" -END_MARKER = "// END BYTECODES //" - -RESERVED_WORDS = { - "co_consts": "Use FRAME_CO_CONSTS.", - "co_names": "Use FRAME_CO_NAMES.", -} - -RE_GO_TO_INSTR = r"^\s*GO_TO_INSTRUCTION\((\w+)\);\s*(?://.*)?$" - - -class Analyzer: - """Parse input, analyze it, and write to output.""" - - input_filenames: list[str] - errors: int = 0 - warnings: int = 0 - - def __init__(self, input_filenames: list[str]): - self.input_filenames = input_filenames - - def message(self, msg: str, node: parsing.Node) -> None: - lineno = 0 - filename = "" - if context := node.context: - filename = context.owner.filename - # Use line number of first non-comment in the node - for token in context.owner.tokens[context.begin : context.end]: - lineno = token.line - if token.kind != "COMMENT": - break - print(f"{filename}:{lineno}: {msg}", file=sys.stderr) - - def error(self, msg: str, node: parsing.Node) -> None: - self.message("error: " + msg, node) - self.errors += 1 - - def warning(self, msg: str, node: parsing.Node) -> None: - self.message("warning: " + msg, node) - self.warnings += 1 - - def note(self, msg: str, node: parsing.Node) -> None: - self.message("note: " + msg, node) - - everything: list[ - parsing.InstDef - | parsing.Macro - | parsing.Pseudo - ] - instrs: dict[str, Instruction] # Includes ops - macros: dict[str, parsing.Macro] - macro_instrs: dict[str, MacroInstruction] - families: dict[str, parsing.Family] - pseudos: dict[str, parsing.Pseudo] - pseudo_instrs: dict[str, PseudoInstruction] - - def parse(self) -> None: - """Parse the source text. - - We only want the parser to see the stuff between the - begin and end markers. 
- """ - - self.everything = [] - self.instrs = {} - self.macros = {} - self.families = {} - self.pseudos = {} - - instrs_idx: dict[str, int] = dict() - - for filename in self.input_filenames: - self.parse_file(filename, instrs_idx) - - files = " + ".join(self.input_filenames) - n_instrs = len(set(self.instrs) & set(self.macros)) - n_ops = len(self.instrs) - n_instrs - print( - f"Read {n_instrs} instructions, {n_ops} ops, " - f"{len(self.macros)} macros, {len(self.pseudos)} pseudos, " - f"and {len(self.families)} families from {files}", - file=sys.stderr, - ) - - def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None: - with open(filename) as file: - src = file.read() - - psr = parsing.Parser(src, filename=prettify_filename(filename)) - - # Skip until begin marker - while tkn := psr.next(raw=True): - if tkn.text == BEGIN_MARKER: - break - else: - raise psr.make_syntax_error( - f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}" - ) - start = psr.getpos() - - # Find end marker, then delete everything after it - while tkn := psr.next(raw=True): - if tkn.text == END_MARKER: - break - del psr.tokens[psr.getpos() - 1 :] - - # Parse from start - psr.setpos(start) - thing: parsing.Node | None - thing_first_token = psr.peek() - while thing := psr.definition(): - thing = typing.cast( - parsing.InstDef | parsing.Macro | parsing.Pseudo | parsing.Family, thing - ) - if ws := [w for w in RESERVED_WORDS if variable_used(thing, w)]: - self.error( - f"'{ws[0]}' is a reserved word. {RESERVED_WORDS[ws[0]]}", thing - ) - - match thing: - case parsing.InstDef(name=name): - macro: parsing.Macro | None = None - if thing.kind == "inst" and "override" not in thing.annotations: - macro = parsing.Macro(name, [parsing.OpName(name)]) - if name in self.instrs: - if "override" not in thing.annotations: - raise psr.make_syntax_error( - f"Duplicate definition of '{name}' @ {thing.context} " - f"previous definition @ {self.instrs[name].inst.context}", - thing_first_token, - ) - self.everything[instrs_idx[name]] = thing - if name not in self.instrs and "override" in thing.annotations: - raise psr.make_syntax_error( - f"Definition of '{name}' @ {thing.context} is supposed to be " - "an override but no previous definition exists.", - thing_first_token, - ) - self.instrs[name] = Instruction(thing) - instrs_idx[name] = len(self.everything) - self.everything.append(thing) - if macro is not None: - self.macros[macro.name] = macro - self.everything.append(macro) - case parsing.Macro(name): - self.macros[name] = thing - self.everything.append(thing) - case parsing.Family(name): - self.families[name] = thing - case parsing.Pseudo(name): - self.pseudos[name] = thing - self.everything.append(thing) - case _: - assert_never(thing) - if not psr.eof(): - raise psr.make_syntax_error(f"Extra stuff at the end of {filename}") - - def analyze(self) -> None: - """Analyze the inputs. - - Raises SystemExit if there is an error. 
- """ - self.analyze_macros_and_pseudos() - self.map_families() - self.mark_predictions() - self.check_families() - - def mark_predictions(self) -> None: - """Mark the instructions that need PREDICTED() labels.""" - # Start with family heads - for family in self.families.values(): - if family.name in self.instrs: - self.instrs[family.name].predicted = True - if family.name in self.macro_instrs: - self.macro_instrs[family.name].predicted = True - # Also look for GO_TO_INSTRUCTION() calls - for instr in self.instrs.values(): - targets: set[str] = set() - for line in instr.block_text: - if m := re.match(RE_GO_TO_INSTR, line): - targets.add(m.group(1)) - for target in targets: - if target_instr := self.instrs.get(target): - target_instr.predicted = True - if target_macro := self.macro_instrs.get(target): - target_macro.predicted = True - if not target_instr and not target_macro: - self.error( - f"Unknown instruction {target!r} predicted in {instr.name!r}", - instr.inst, # TODO: Use better location - ) - - def map_families(self) -> None: - """Link instruction names back to their family, if they have one.""" - for family in self.families.values(): - for member in [family.name] + family.members: - if member_instr := self.instrs.get(member): - if ( - member_instr.family is not family - and member_instr.family is not None - ): - self.error( - f"Instruction {member} is a member of multiple families " - f"({member_instr.family.name}, {family.name}).", - family, - ) - else: - member_instr.family = family - if member_mac := self.macro_instrs.get(member): - assert member_mac.family is None, (member, member_mac.family.name) - member_mac.family = family - if not member_instr and not member_mac: - self.error( - f"Unknown instruction {member!r} referenced in family {family.name!r}", - family, - ) - # A sanctioned exception: - # This opcode is a member of the family but it doesn't pass the checks. - if mac := self.macro_instrs.get("BINARY_OP_INPLACE_ADD_UNICODE"): - mac.family = self.families.get("BINARY_OP") - - def check_families(self) -> None: - """Check each family: - - - Must have at least 2 members (including head) - - Head and all members must be known instructions - - Head and all members must have the same cache, input and output effects - """ - for family in self.families.values(): - if family.name not in self.macro_instrs and family.name not in self.instrs: - self.error( - f"Family {family.name!r} has unknown instruction {family.name!r}", - family, - ) - members = [ - member - for member in family.members - if member in self.instrs or member in self.macro_instrs - ] - if members != family.members: - unknown = set(family.members) - set(members) - self.error( - f"Family {family.name!r} has unknown members: {unknown}", family - ) - expected_effects = self.effect_counts(family.name) - for member in members: - member_effects = self.effect_counts(member) - if member_effects != expected_effects: - self.error( - f"Family {family.name!r} has inconsistent " - f"(cache, input, output) effects:\n" - f" {family.name} = {expected_effects}; " - f"{member} = {member_effects}", - family, - ) - - def effect_counts(self, name: str) -> tuple[int, int, int]: - if mac := self.macro_instrs.get(name): - cache = mac.cache_offset - input, output = 0, 0 - for part in mac.parts: - if isinstance(part, Component): - # A component may pop what the previous component pushed, - # so we offset the input/output counts by that. 
- delta_i = len(part.instr.input_effects) - delta_o = len(part.instr.output_effects) - offset = min(delta_i, output) - input += delta_i - offset - output += delta_o - offset - else: - assert False, f"Unknown instruction {name!r}" - return cache, input, output - - def analyze_macros_and_pseudos(self) -> None: - """Analyze each macro and pseudo instruction.""" - self.macro_instrs = {} - self.pseudo_instrs = {} - for name, macro in self.macros.items(): - self.macro_instrs[name] = mac = self.analyze_macro(macro) - self.check_macro_consistency(mac) - for name, pseudo in self.pseudos.items(): - self.pseudo_instrs[name] = self.analyze_pseudo(pseudo) - - # TODO: Merge with similar code in stacking.py, write_components() - def check_macro_consistency(self, mac: MacroInstruction) -> None: - def get_var_names(instr: Instruction) -> dict[str, StackEffect]: - vars: dict[str, StackEffect] = {} - for eff in instr.input_effects + instr.output_effects: - if eff.name == UNUSED: - continue - if eff.name in vars: - if vars[eff.name] != eff: - self.error( - f"Instruction {instr.name!r} has " - f"inconsistent type/cond/size for variable " - f"{eff.name!r}: {vars[eff.name]} vs {eff}", - instr.inst, - ) - else: - vars[eff.name] = eff - return vars - - all_vars: dict[str, StackEffect] = {} - # print("Checking", mac.name) - prevop: Instruction | None = None - for part in mac.parts: - if not isinstance(part, Component): - continue - vars = get_var_names(part.instr) - # print(" //", part.instr.name, "//", vars) - for name, eff in vars.items(): - if name in all_vars: - if all_vars[name] != eff: - self.error( - f"Macro {mac.name!r} has " - f"inconsistent type/cond/size for variable " - f"{name!r}: " - f"{all_vars[name]} vs {eff} in {part.instr.name!r}", - mac.macro, - ) - else: - all_vars[name] = eff - if prevop is not None: - pushes = list(prevop.output_effects) - pops = list(reversed(part.instr.input_effects)) - copies: list[tuple[StackEffect, StackEffect]] = [] - while pushes and pops and pushes[-1] == pops[0]: - src, dst = pushes.pop(), pops.pop(0) - if src.name == dst.name or dst.name == UNUSED: - continue - copies.append((src, dst)) - reads = set(copy[0].name for copy in copies) - writes = set(copy[1].name for copy in copies) - if reads & writes: - self.error( - f"Macro {mac.name!r} has conflicting copies " - f"(source of one copy is destination of another): " - f"{reads & writes}", - mac.macro, - ) - prevop = part.instr - - def analyze_macro(self, macro: parsing.Macro) -> MacroInstruction: - components = self.check_macro_components(macro) - parts: MacroParts = [] - flags = InstructionFlags.newEmpty() - offset = 0 - for component in components: - match component: - case parsing.CacheEffect() as ceffect: - parts.append(ceffect) - offset += ceffect.size - case Instruction() as instr: - part, offset = self.analyze_instruction(instr, offset) - parts.append(part) - if instr.name != "_SAVE_RETURN_OFFSET": - # _SAVE_RETURN_OFFSET's oparg does not transfer - flags.add(instr.instr_flags) - case _: - assert_never(component) - format = "IB" if flags.HAS_ARG_FLAG else "IX" - if offset: - format += "C" + "0" * (offset - 1) - return MacroInstruction(macro.name, format, flags, macro, parts, offset) - - def analyze_pseudo(self, pseudo: parsing.Pseudo) -> PseudoInstruction: - targets: list[Instruction | MacroInstruction] = [] - for target_name in pseudo.targets: - if target_name in self.instrs: - targets.append(self.instrs[target_name]) - else: - targets.append(self.macro_instrs[target_name]) - assert targets - ignored_flags = 
{"HAS_EVAL_BREAK_FLAG", "HAS_DEOPT_FLAG", "HAS_ERROR_FLAG", - "HAS_ESCAPES_FLAG"} - assert len({t.instr_flags.bitmap(ignore=ignored_flags) for t in targets}) == 1 - - flags = InstructionFlags(**{f"{f}_FLAG" : True for f in pseudo.flags}) - for t in targets: - flags.add(t.instr_flags) - return PseudoInstruction(pseudo.name, targets, flags) - - def analyze_instruction( - self, instr: Instruction, offset: int - ) -> tuple[Component, int]: - active_effects: list[ActiveCacheEffect] = [] - for ceffect in instr.cache_effects: - if ceffect.name != UNUSED: - active_effects.append(ActiveCacheEffect(ceffect, offset)) - offset += ceffect.size - return ( - Component(instr, active_effects), - offset, - ) - - def check_macro_components( - self, macro: parsing.Macro - ) -> list[InstructionOrCacheEffect]: - components: list[InstructionOrCacheEffect] = [] - for uop in macro.uops: - match uop: - case parsing.OpName(name): - if name not in self.instrs: - self.error(f"Unknown instruction {name!r}", macro) - else: - components.append(self.instrs[name]) - case parsing.CacheEffect(): - components.append(uop) - case _: - assert_never(uop) - return components - - def report_non_viable_uops(self, jsonfile: str) -> None: - print("The following ops are not viable uops:") - skips = { - "CACHE", - "RESERVED", - "INTERPRETER_EXIT", - "JUMP_BACKWARD", - "LOAD_FAST_LOAD_FAST", - "LOAD_CONST_LOAD_FAST", - "STORE_FAST_STORE_FAST", - "POP_JUMP_IF_TRUE", - "POP_JUMP_IF_FALSE", - "_ITER_JUMP_LIST", - "_ITER_JUMP_TUPLE", - "_ITER_JUMP_RANGE", - } - try: - # Secret feature: if bmraw.json exists, print and sort by execution count - counts = load_execution_counts(jsonfile) - except FileNotFoundError as err: - counts = {} - non_viable = [ - instr - for instr in self.instrs.values() - if instr.name not in skips - and not instr.name.startswith("INSTRUMENTED_") - and not instr.is_viable_uop() - ] - non_viable.sort(key=lambda instr: (-counts.get(instr.name, 0), instr.name)) - for instr in non_viable: - if instr.name in counts: - scount = f"{counts[instr.name]:,}" - else: - scount = "" - print(f" {scount:>15} {instr.name:<35}", end="") - if instr.name in self.families: - print(" (unspecialized)", end="") - elif instr.family is not None: - print(f" (specialization of {instr.family.name})", end="") - print() - - -def load_execution_counts(jsonfile: str) -> dict[str, int]: - import json - - with open(jsonfile) as f: - jsondata = json.load(f) - - # Look for keys like "opcode[LOAD_FAST].execution_count" - prefix = "opcode[" - suffix = "].execution_count" - res: dict[str, int] = {} - for key, value in jsondata.items(): - if key.startswith(prefix) and key.endswith(suffix): - res[key[len(prefix) : -len(suffix)]] = value - return res diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index e077eb0a8ed203..82ef8888bfcee5 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -11,11 +11,16 @@ class Properties: deopts: bool oparg: bool jumps: bool + eval_breaker: bool ends_with_eval_breaker: bool needs_this: bool always_exits: bool stores_sp: bool tier_one_only: bool + uses_co_consts: bool + uses_co_names: bool + uses_locals: bool + has_free: bool def dump(self, indent: str) -> None: print(indent, end="") @@ -30,11 +35,16 @@ def from_list(properties: list["Properties"]) -> "Properties": deopts=any(p.deopts for p in properties), oparg=any(p.oparg for p in properties), jumps=any(p.jumps for p in properties), + eval_breaker=any(p.eval_breaker for p in properties), 
ends_with_eval_breaker=any(p.ends_with_eval_breaker for p in properties), needs_this=any(p.needs_this for p in properties), always_exits=any(p.always_exits for p in properties), stores_sp=any(p.stores_sp for p in properties), tier_one_only=any(p.tier_one_only for p in properties), + uses_co_consts=any(p.uses_co_consts for p in properties), + uses_co_names=any(p.uses_co_names for p in properties), + uses_locals=any(p.uses_locals for p in properties), + has_free=any(p.has_free for p in properties), ) @@ -44,11 +54,16 @@ def from_list(properties: list["Properties"]) -> "Properties": deopts=False, oparg=False, jumps=False, + eval_breaker=False, ends_with_eval_breaker=False, needs_this=False, always_exits=False, stores_sp=False, tier_one_only=False, + uses_co_consts=False, + uses_co_names=False, + uses_locals=False, + has_free=False, ) @@ -142,6 +157,12 @@ def is_viable(self) -> bool: return False return True + def is_super(self) -> bool: + for tkn in self.body: + if tkn.kind == "IDENTIFIER" and tkn.text == "oparg1": + return True + return False + Part = Uop | Skip @@ -153,6 +174,7 @@ class Instruction: _properties: Properties | None is_target: bool = False family: Optional["Family"] = None + opcode: int = -1 @property def properties(self) -> Properties: @@ -171,16 +193,30 @@ def dump(self, indent: str) -> None: def size(self) -> int: return 1 + sum(part.size for part in self.parts) + def is_super(self) -> bool: + if len(self.parts) != 1: + return False + uop = self.parts[0] + if isinstance(uop, Uop): + return uop.is_super() + else: + return False + @dataclass class PseudoInstruction: name: str targets: list[Instruction] flags: list[str] + opcode: int = -1 def dump(self, indent: str) -> None: print(indent, self.name, "->", " or ".join([t.name for t in self.targets])) + @property + def properties(self) -> Properties: + return Properties.from_list([i.properties for i in self.targets]) + @dataclass class Family: @@ -198,12 +234,15 @@ class Analysis: uops: dict[str, Uop] families: dict[str, Family] pseudos: dict[str, PseudoInstruction] + opmap: dict[str, int] + have_arg: int + min_instrumented: int def analysis_error(message: str, tkn: lexer.Token) -> SyntaxError: # To do -- support file and line output # Construct a SyntaxError instance from message and token - return lexer.make_syntax_error(message, "", tkn.line, tkn.column, "") + return lexer.make_syntax_error(message, tkn.filename, tkn.line, tkn.column, "") def override_error( @@ -238,6 +277,11 @@ def analyze_caches(inputs: list[parser.InputEffect]) -> list[CacheEntry]: caches: list[parser.CacheEffect] = [ i for i in inputs if isinstance(i, parser.CacheEffect) ] + for cache in caches: + if cache.name == "unused": + raise analysis_error( + "Unused cache entry in op. 
Move to enclosing macro.", cache.tokens[0] + ) return [CacheEntry(i.name, int(i.size)) for i in caches] @@ -258,7 +302,81 @@ def is_infallible(op: parser.InstDef) -> bool: ) -from flags import makes_escaping_api_call +NON_ESCAPING_FUNCTIONS = ( + "Py_INCREF", + "_PyDictOrValues_IsValues", + "_PyObject_DictOrValuesPointer", + "_PyDictOrValues_GetValues", + "_PyObject_MakeInstanceAttributesFromDict", + "Py_DECREF", + "_Py_DECREF_SPECIALIZED", + "DECREF_INPUTS_AND_REUSE_FLOAT", + "PyUnicode_Append", + "_PyLong_IsZero", + "Py_SIZE", + "Py_TYPE", + "PyList_GET_ITEM", + "PyTuple_GET_ITEM", + "PyList_GET_SIZE", + "PyTuple_GET_SIZE", + "Py_ARRAY_LENGTH", + "Py_Unicode_GET_LENGTH", + "PyUnicode_READ_CHAR", + "_Py_SINGLETON", + "PyUnicode_GET_LENGTH", + "_PyLong_IsCompact", + "_PyLong_IsNonNegativeCompact", + "_PyLong_CompactValue", + "_Py_NewRef", + "_Py_IsImmortal", + "_Py_STR", + "_PyLong_Add", + "_PyLong_Multiply", + "_PyLong_Subtract", + "Py_NewRef", + "_PyList_ITEMS", + "_PyTuple_ITEMS", + "_PyList_AppendTakeRef", + "_Py_atomic_load_uintptr_relaxed", + "_PyFrame_GetCode", + "_PyThreadState_HasStackSpace", +) + +ESCAPING_FUNCTIONS = ( + "import_name", + "import_from", +) + + +def makes_escaping_api_call(instr: parser.InstDef) -> bool: + if "CALL_INTRINSIC" in instr.name: + return True + tkns = iter(instr.tokens) + for tkn in tkns: + if tkn.kind != lexer.IDENTIFIER: + continue + try: + next_tkn = next(tkns) + except StopIteration: + return False + if next_tkn.kind != lexer.LPAREN: + continue + if tkn.text in ESCAPING_FUNCTIONS: + return True + if not tkn.text.startswith("Py") and not tkn.text.startswith("_Py"): + continue + if tkn.text.endswith("Check"): + continue + if tkn.text.startswith("Py_Is"): + continue + if tkn.text.endswith("CheckExact"): + continue + if tkn.text in NON_ESCAPING_FUNCTIONS: + continue + return True + return False + + EXITS = { "DISPATCH", @@ -300,17 +418,28 @@ def always_exits(op: parser.InstDef) -> bool: def compute_properties(op: parser.InstDef) -> Properties: + has_free = ( + variable_used(op, "PyCell_New") + or variable_used(op, "PyCell_GET") + or variable_used(op, "PyCell_SET") + ) return Properties( escapes=makes_escaping_api_call(op), infallible=is_infallible(op), deopts=variable_used(op, "DEOPT_IF"), oparg=variable_used(op, "oparg"), jumps=variable_used(op, "JUMPBY"), + eval_breaker=variable_used(op, "CHECK_EVAL_BREAKER"), ends_with_eval_breaker=eval_breaker_at_end(op), needs_this=variable_used(op, "this_instr"), always_exits=always_exits(op), stores_sp=variable_used(op, "STORE_SP"), tier_one_only=variable_used(op, "TIER_ONE_ONLY"), + uses_co_consts=variable_used(op, "FRAME_CO_CONSTS"), + uses_co_names=variable_used(op, "FRAME_CO_NAMES"), + uses_locals=(variable_used(op, "GETLOCAL") or variable_used(op, "SETLOCAL")) + and not has_free, + has_free=has_free, ) @@ -417,6 +546,95 @@ def add_pseudo( ) +def assign_opcodes( + instructions: dict[str, Instruction], + families: dict[str, Family], + pseudos: dict[str, PseudoInstruction], +) -> tuple[dict[str, int], int, int]: + """Assigns opcodes, then returns the opmap, + have_arg and min_instrumented values""" + instmap: dict[str, int] = {} + + # 0 is reserved for cache entries. This helps debugging. + instmap["CACHE"] = 0 + + # 17 is reserved as it is the initial value for the specializing counter. + # This helps catch cases where we attempt to execute a cache. 
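+ # (Resulting layout, per the assignments below: 0 is CACHE, 17 is RESERVED,
+ # 149 is RESUME; other ordinary opcodes count up from 1, specialized
+ # opcodes start at 150, instrumented opcodes run up to 254, and pseudo
+ # instructions are numbered from 256.)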
+ instmap["RESERVED"] = 17 + + # 149 is RESUME - it is hard coded as such in Tools/build/deepfreeze.py + instmap["RESUME"] = 149 + + # This is an historical oddity. + instmap["BINARY_OP_INPLACE_ADD_UNICODE"] = 3 + + instmap["INSTRUMENTED_LINE"] = 254 + + instrumented = [name for name in instructions if name.startswith("INSTRUMENTED")] + + # Special case: this instruction is implemented in ceval.c + # rather than bytecodes.c, so we need to add it explicitly + # here (at least until we add something to bytecodes.c to + # declare external instructions). + instrumented.append("INSTRUMENTED_LINE") + + specialized: set[str] = set() + no_arg: list[str] = [] + has_arg: list[str] = [] + + for family in families.values(): + specialized.update(inst.name for inst in family.members) + + for inst in instructions.values(): + name = inst.name + if name in specialized: + continue + if name in instrumented: + continue + if inst.properties.oparg: + has_arg.append(name) + else: + no_arg.append(name) + + # Specialized ops appear in their own section + # Instrumented opcodes are at the end of the valid range + min_internal = 150 + min_instrumented = 254 - (len(instrumented) - 1) + assert min_internal + len(specialized) < min_instrumented + + next_opcode = 1 + + def add_instruction(name: str) -> None: + nonlocal next_opcode + if name in instmap: + return # Pre-defined name + while next_opcode in instmap.values(): + next_opcode += 1 + instmap[name] = next_opcode + next_opcode += 1 + + for name in sorted(no_arg): + add_instruction(name) + for name in sorted(has_arg): + add_instruction(name) + # For compatibility + next_opcode = min_internal + for name in sorted(specialized): + add_instruction(name) + next_opcode = min_instrumented + for name in instrumented: + add_instruction(name) + + for name in instructions: + instructions[name].opcode = instmap[name] + + for op, name in enumerate(sorted(pseudos), 256): + instmap[name] = op + pseudos[name].opcode = op + + return instmap, len(no_arg), min_instrumented + + def analyze_forest(forest: list[parser.AstNode]) -> Analysis: instructions: dict[str, Instruction] = {} uops: dict[str, Uop] = {} @@ -460,10 +678,20 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: continue if target.text in instructions: instructions[target.text].is_target = True - # Hack + # Special case BINARY_OP_INPLACE_ADD_UNICODE + # BINARY_OP_INPLACE_ADD_UNICODE is not a normal family member, + # as it is the wrong size, but we need it to maintain an + # historical optimization. 
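+ # Appending it to the BINARY_OP family members below means the generated
+ # tables can treat it like the other BINARY_OP specializations (the old
+ # generator special-cased its deopt mapping back to BINARY_OP by hand).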
if "BINARY_OP_INPLACE_ADD_UNICODE" in instructions: - instructions["BINARY_OP_INPLACE_ADD_UNICODE"].family = families["BINARY_OP"] - return Analysis(instructions, uops, families, pseudos) + inst = instructions["BINARY_OP_INPLACE_ADD_UNICODE"] + inst.family = families["BINARY_OP"] + families["BINARY_OP"].members.append(inst) + opmap, first_arg, min_instrumented = assign_opcodes( + instructions, families, pseudos + ) + return Analysis( + instructions, uops, families, pseudos, opmap, first_arg, min_instrumented + ) def analyze_files(filenames: list[str]) -> Analysis: diff --git a/Tools/cases_generator/cwriter.py b/Tools/cases_generator/cwriter.py index 67b1c9a169024c..069f0177a74018 100644 --- a/Tools/cases_generator/cwriter.py +++ b/Tools/cases_generator/cwriter.py @@ -1,5 +1,6 @@ +import contextlib from lexer import Token -from typing import TextIO +from typing import TextIO, Iterator class CWriter: @@ -44,9 +45,12 @@ def maybe_dedent(self, txt: str) -> None: def maybe_indent(self, txt: str) -> None: parens = txt.count("(") - txt.count(")") - if parens > 0 and self.last_token: - offset = self.last_token.end_column - 1 - if offset <= self.indents[-1] or offset > 40: + if parens > 0: + if self.last_token: + offset = self.last_token.end_column - 1 + if offset <= self.indents[-1] or offset > 40: + offset = self.indents[-1] + 4 + else: offset = self.indents[-1] + 4 self.indents.append(offset) if is_label(txt): @@ -54,6 +58,7 @@ def maybe_indent(self, txt: str) -> None: else: braces = txt.count("{") - txt.count("}") if braces > 0: + assert braces == 1 if 'extern "C"' in txt: self.indents.append(self.indents[-1]) else: @@ -114,6 +119,28 @@ def start_line(self) -> None: self.newline = True self.last_token = None + @contextlib.contextmanager + def header_guard(self, name: str) -> Iterator[None]: + self.out.write( + f""" +#ifndef {name} +#define {name} +#ifdef __cplusplus +extern "C" {{ +#endif + +""" + ) + yield + self.out.write( + f""" +#ifdef __cplusplus +}} +#endif +#endif /* !{name} */ +""" + ) + def is_label(txt: str) -> bool: return not txt.startswith("//") and txt.endswith(":") diff --git a/Tools/cases_generator/flags.py b/Tools/cases_generator/flags.py deleted file mode 100644 index bf76112159e38e..00000000000000 --- a/Tools/cases_generator/flags.py +++ /dev/null @@ -1,191 +0,0 @@ -import dataclasses - -from formatting import Formatter -import lexer as lx -import parsing -from typing import AbstractSet - -NON_ESCAPING_FUNCTIONS = ( - "Py_INCREF", - "_PyDictOrValues_IsValues", - "_PyObject_DictOrValuesPointer", - "_PyDictOrValues_GetValues", - "_PyObject_MakeInstanceAttributesFromDict", - "Py_DECREF", - "_Py_DECREF_SPECIALIZED", - "DECREF_INPUTS_AND_REUSE_FLOAT", - "PyUnicode_Append", - "_PyLong_IsZero", - "Py_SIZE", - "Py_TYPE", - "PyList_GET_ITEM", - "PyTuple_GET_ITEM", - "PyList_GET_SIZE", - "PyTuple_GET_SIZE", - "Py_ARRAY_LENGTH", - "Py_Unicode_GET_LENGTH", - "PyUnicode_READ_CHAR", - "_Py_SINGLETON", - "PyUnicode_GET_LENGTH", - "_PyLong_IsCompact", - "_PyLong_IsNonNegativeCompact", - "_PyLong_CompactValue", - "_Py_NewRef", - "_Py_IsImmortal", - "_Py_STR", - "_PyLong_Add", - "_PyLong_Multiply", - "_PyLong_Subtract", - "Py_NewRef", - "_PyList_ITEMS", - "_PyTuple_ITEMS", - "_PyList_AppendTakeRef", - "_Py_atomic_load_uintptr_relaxed", - "_PyFrame_GetCode", - "_PyThreadState_HasStackSpace", -) - -ESCAPING_FUNCTIONS = ( - "import_name", - "import_from", -) - - -def makes_escaping_api_call(instr: parsing.InstDef) -> bool: - if "CALL_INTRINSIC" in instr.name: - return True - tkns = 
iter(instr.tokens) - for tkn in tkns: - if tkn.kind != lx.IDENTIFIER: - continue - try: - next_tkn = next(tkns) - except StopIteration: - return False - if next_tkn.kind != lx.LPAREN: - continue - if tkn.text in ESCAPING_FUNCTIONS: - return True - if not tkn.text.startswith("Py") and not tkn.text.startswith("_Py"): - continue - if tkn.text.endswith("Check"): - continue - if tkn.text.startswith("Py_Is"): - continue - if tkn.text.endswith("CheckExact"): - continue - if tkn.text in NON_ESCAPING_FUNCTIONS: - continue - return True - return False - - -@dataclasses.dataclass -class InstructionFlags: - """Construct and manipulate instruction flags""" - - HAS_ARG_FLAG: bool = False - HAS_CONST_FLAG: bool = False - HAS_NAME_FLAG: bool = False - HAS_JUMP_FLAG: bool = False - HAS_FREE_FLAG: bool = False - HAS_LOCAL_FLAG: bool = False - HAS_EVAL_BREAK_FLAG: bool = False - HAS_DEOPT_FLAG: bool = False - HAS_ERROR_FLAG: bool = False - HAS_ESCAPES_FLAG: bool = False - - def __post_init__(self) -> None: - self.bitmask = {name: (1 << i) for i, name in enumerate(self.names())} - - @staticmethod - def fromInstruction(instr: parsing.InstDef) -> "InstructionFlags": - has_free = ( - variable_used(instr, "PyCell_New") - or variable_used(instr, "PyCell_GET") - or variable_used(instr, "PyCell_SET") - ) - - return InstructionFlags( - HAS_ARG_FLAG=variable_used(instr, "oparg"), - HAS_CONST_FLAG=variable_used(instr, "FRAME_CO_CONSTS"), - HAS_NAME_FLAG=variable_used(instr, "FRAME_CO_NAMES"), - HAS_JUMP_FLAG=variable_used(instr, "JUMPBY"), - HAS_FREE_FLAG=has_free, - HAS_LOCAL_FLAG=( - variable_used(instr, "GETLOCAL") or variable_used(instr, "SETLOCAL") - ) - and not has_free, - HAS_EVAL_BREAK_FLAG=variable_used(instr, "CHECK_EVAL_BREAKER"), - HAS_DEOPT_FLAG=variable_used(instr, "DEOPT_IF"), - HAS_ERROR_FLAG=( - variable_used(instr, "ERROR_IF") - or variable_used(instr, "error") - or variable_used(instr, "pop_1_error") - or variable_used(instr, "exception_unwind") - or variable_used(instr, "resume_with_error") - ), - HAS_ESCAPES_FLAG=makes_escaping_api_call(instr), - ) - - @staticmethod - def newEmpty() -> "InstructionFlags": - return InstructionFlags() - - def add(self, other: "InstructionFlags") -> None: - for name, value in dataclasses.asdict(other).items(): - if value: - setattr(self, name, value) - - def names(self, value: bool | None = None) -> list[str]: - if value is None: - return list(dataclasses.asdict(self).keys()) - return [n for n, v in dataclasses.asdict(self).items() if v == value] - - def bitmap(self, ignore: AbstractSet[str] = frozenset()) -> int: - flags = 0 - assert all(hasattr(self, name) for name in ignore) - for name in self.names(): - if getattr(self, name) and name not in ignore: - flags |= self.bitmask[name] - return flags - - @classmethod - def emit_macros(cls, out: Formatter) -> None: - flags = cls.newEmpty() - for name, value in flags.bitmask.items(): - out.emit(f"#define {name} ({value})") - - for name, value in flags.bitmask.items(): - out.emit( - f"#define OPCODE_{name[:-len('_FLAG')]}(OP) " - f"(_PyOpcode_opcode_metadata[OP].flags & ({name}))" - ) - - -def variable_used(node: parsing.Node, name: str) -> bool: - """Determine whether a variable with a given name is used in a node.""" - return any( - token.kind == "IDENTIFIER" and token.text == name for token in node.tokens - ) - - -def variable_used_unspecialized(node: parsing.Node, name: str) -> bool: - """Like variable_used(), but skips #if ENABLE_SPECIALIZATION blocks.""" - tokens: list[lx.Token] = [] - skipping = False - for i, token 
in enumerate(node.tokens): - if token.kind == "CMACRO": - text = "".join(token.text.split()) - # TODO: Handle nested #if - if text == "#if": - if i + 1 < len(node.tokens) and node.tokens[i + 1].text in ( - "ENABLE_SPECIALIZATION", - "TIER_ONE", - ): - skipping = True - elif text in ("#else", "#endif"): - skipping = False - if not skipping: - tokens.append(token) - return any(token.kind == "IDENTIFIER" and token.text == name for token in tokens) diff --git a/Tools/cases_generator/formatting.py b/Tools/cases_generator/formatting.py deleted file mode 100644 index 4fd9172d20c274..00000000000000 --- a/Tools/cases_generator/formatting.py +++ /dev/null @@ -1,206 +0,0 @@ -import contextlib -import re -import typing -from collections.abc import Iterator - -from parsing import StackEffect, Family - -UNUSED = "unused" - - -class Formatter: - """Wraps an output stream with the ability to indent etc.""" - - stream: typing.TextIO - prefix: str - emit_line_directives: bool = False - lineno: int # Next line number, 1-based - filename: str # Slightly improved stream.filename - nominal_lineno: int - nominal_filename: str - - def __init__( - self, - stream: typing.TextIO, - indent: int, - emit_line_directives: bool = False, - comment: str = "//", - ) -> None: - self.stream = stream - self.prefix = " " * indent - self.emit_line_directives = emit_line_directives - self.comment = comment - self.lineno = 1 - self.filename = prettify_filename(self.stream.name) - self.nominal_lineno = 1 - self.nominal_filename = self.filename - - def write_raw(self, s: str) -> None: - self.stream.write(s) - newlines = s.count("\n") - self.lineno += newlines - self.nominal_lineno += newlines - - def emit(self, arg: str) -> None: - if arg: - self.write_raw(f"{self.prefix}{arg}\n") - else: - self.write_raw("\n") - - def set_lineno(self, lineno: int, filename: str) -> None: - if self.emit_line_directives: - if lineno != self.nominal_lineno or filename != self.nominal_filename: - self.emit(f'#line {lineno} "{filename}"') - self.nominal_lineno = lineno - self.nominal_filename = filename - - def reset_lineno(self) -> None: - if self.lineno != self.nominal_lineno or self.filename != self.nominal_filename: - self.set_lineno(self.lineno + 1, self.filename) - - @contextlib.contextmanager - def indent(self) -> Iterator[None]: - self.prefix += " " - yield - self.prefix = self.prefix[:-4] - - @contextlib.contextmanager - def block(self, head: str, tail: str = "") -> Iterator[None]: - if head: - self.emit(head + " {") - else: - self.emit("{") - with self.indent(): - yield - self.emit("}" + tail) - - def stack_adjust( - self, - input_effects: list[StackEffect], - output_effects: list[StackEffect], - ) -> None: - shrink, isym = list_effect_size(input_effects) - grow, osym = list_effect_size(output_effects) - diff = grow - shrink - if isym and isym != osym: - self.emit(f"STACK_SHRINK({isym});") - if diff < 0: - self.emit(f"STACK_SHRINK({-diff});") - if diff > 0: - self.emit(f"STACK_GROW({diff});") - if osym and osym != isym: - self.emit(f"STACK_GROW({osym});") - - def declare(self, dst: StackEffect, src: StackEffect | None) -> None: - if dst.name == UNUSED or dst.cond == "0": - return - typ = f"{dst.type}" if dst.type else "PyObject *" - if src: - cast = self.cast(dst, src) - initexpr = f"{cast}{src.name}" - if src.cond and src.cond != "1": - initexpr = f"{parenthesize_cond(src.cond)} ? 
{initexpr} : NULL" - init = f" = {initexpr}" - elif dst.cond and dst.cond != "1": - init = " = NULL" - else: - init = "" - sepa = "" if typ.endswith("*") else " " - self.emit(f"{typ}{sepa}{dst.name}{init};") - - def assign(self, dst: StackEffect, src: StackEffect) -> None: - if src.name == UNUSED or dst.name == UNUSED: - return - cast = self.cast(dst, src) - if re.match(r"^REG\(oparg(\d+)\)$", dst.name): - self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") - else: - stmt = f"{dst.name} = {cast}{src.name};" - if src.cond and src.cond != "1": - if src.cond == "0": - # It will not be executed - return - stmt = f"if ({src.cond}) {{ {stmt} }}" - self.emit(stmt) - - def cast(self, dst: StackEffect, src: StackEffect) -> str: - return f"({dst.type or 'PyObject *'})" if src.type != dst.type else "" - - def static_assert_family_size( - self, name: str, family: Family | None, cache_offset: int - ) -> None: - """Emit a static_assert for the size of a family, if known. - - This will fail at compile time if the cache size computed from - the instruction definition does not match the size of the struct - used by specialize.c. - """ - if family and name == family.name: - cache_size = family.size - if cache_size: - self.emit( - f"static_assert({cache_size} == {cache_offset}, " - f'"incorrect cache size");' - ) - - -def prettify_filename(filename: str) -> str: - # Make filename more user-friendly and less platform-specific, - # it is only used for error reporting at this point. - filename = filename.replace("\\", "/") - if filename.startswith("./"): - filename = filename[2:] - if filename.endswith(".new"): - filename = filename[:-4] - return filename - - -def list_effect_size(effects: list[StackEffect]) -> tuple[int, str]: - numeric = 0 - symbolic: list[str] = [] - for effect in effects: - diff, sym = effect_size(effect) - numeric += diff - if sym: - symbolic.append(maybe_parenthesize(sym)) - return numeric, " + ".join(symbolic) - - -def effect_size(effect: StackEffect) -> tuple[int, str]: - """Return the 'size' impact of a stack effect. - - Returns a tuple (numeric, symbolic) where: - - - numeric is an int giving the statically analyzable size of the effect - - symbolic is a string representing a variable effect (e.g. 'oparg*2') - - At most one of these will be non-zero / non-empty. - """ - if effect.size: - assert not effect.cond, "Array effects cannot have a condition" - return 0, effect.size - elif effect.cond: - if effect.cond in ("0", "1"): - return int(effect.cond), "" - return 0, f"{maybe_parenthesize(effect.cond)} ? 1 : 0" - else: - return 1, "" - - -def maybe_parenthesize(sym: str) -> str: - """Add parentheses around a string if it contains an operator. - - An exception is made for '*' which is common and harmless - in the context where the symbolic size is used. - """ - if re.match(r"^[\s\w*]+$", sym): - return sym - else: - return f"({sym})" - - -def parenthesize_cond(cond: str) -> str: - """Parenthesize a condition, but only if it contains ?: itself.""" - if "?" in cond: - cond = f"({cond})" - return cond diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py deleted file mode 100644 index 50bc14a57fc584..00000000000000 --- a/Tools/cases_generator/generate_cases.py +++ /dev/null @@ -1,850 +0,0 @@ -"""Generate the main interpreter switch. -Reads the instruction definitions from bytecodes.c. -Writes the cases to generated_cases.c.h, which is #included in ceval.c. 
-""" - -import argparse -import contextlib -import itertools -import os -import posixpath -import sys -import textwrap -import typing -from collections.abc import Iterator - -import stacking # Early import to avoid circular import -from _typing_backports import assert_never -from analysis import Analyzer -from formatting import Formatter, list_effect_size -from flags import InstructionFlags, variable_used -from instructions import ( - AnyInstruction, - AbstractInstruction, - Component, - Instruction, - MacroInstruction, - MacroParts, - PseudoInstruction, - TIER_ONE, - TIER_TWO, -) -import parsing -from parsing import StackEffect - - -HERE = os.path.dirname(__file__) -ROOT = os.path.join(HERE, "../..") -THIS = os.path.relpath(__file__, ROOT).replace(os.path.sep, posixpath.sep) - -DEFAULT_INPUT = os.path.relpath(os.path.join(ROOT, "Python/bytecodes.c")) -DEFAULT_OUTPUT = os.path.relpath(os.path.join(ROOT, "Python/generated_cases.c.h")) -DEFAULT_OPCODE_IDS_H_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Include/opcode_ids.h") -) -DEFAULT_OPCODE_TARGETS_H_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Python/opcode_targets.h") -) -DEFAULT_METADATA_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Include/internal/pycore_opcode_metadata.h") -) -DEFAULT_PYMETADATA_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Lib/_opcode_metadata.py") -) -DEFAULT_EXECUTOR_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Python/executor_cases.c.h") -) -DEFAULT_ABSTRACT_INTERPRETER_OUTPUT = os.path.relpath( - os.path.join(ROOT, "Python/abstract_interp_cases.c.h") -) - -# Constants used instead of size for macro expansions. -# Note: 1, 2, 4 must match actual cache entry sizes. -OPARG_SIZES = { - "OPARG_FULL": 0, - "OPARG_CACHE_1": 1, - "OPARG_CACHE_2": 2, - "OPARG_CACHE_4": 4, - "OPARG_TOP": 5, - "OPARG_BOTTOM": 6, - "OPARG_SAVE_RETURN_OFFSET": 7, -} - -INSTR_FMT_PREFIX = "INSTR_FMT_" - -# TODO: generate all these after updating the DSL -SPECIALLY_HANDLED_ABSTRACT_INSTR = { - "LOAD_FAST", - "LOAD_FAST_CHECK", - "LOAD_FAST_AND_CLEAR", - "LOAD_CONST", - "STORE_FAST", - "STORE_FAST_MAYBE_NULL", - "COPY", - # Arithmetic - "_BINARY_OP_MULTIPLY_INT", - "_BINARY_OP_ADD_INT", - "_BINARY_OP_SUBTRACT_INT", -} - -arg_parser = argparse.ArgumentParser( - description="Generate the code for the interpreter switch.", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, -) - -arg_parser.add_argument( - "-v", - "--viable", - help="Print list of non-viable uops and exit", - action="store_true", -) -arg_parser.add_argument( - "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT -) -arg_parser.add_argument( - "-t", - "--opcode_targets_h", - type=str, - help="File with opcode targets for computed gotos", - default=DEFAULT_OPCODE_TARGETS_H_OUTPUT, -) -arg_parser.add_argument( - "-m", - "--metadata", - type=str, - help="Generated C metadata", - default=DEFAULT_METADATA_OUTPUT, -) -arg_parser.add_argument( - "-p", - "--pymetadata", - type=str, - help="Generated Python metadata", - default=DEFAULT_PYMETADATA_OUTPUT, -) -arg_parser.add_argument( - "-l", "--emit-line-directives", help="Emit #line directives", action="store_true" -) -arg_parser.add_argument( - "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" -) -arg_parser.add_argument( - "-a", - "--abstract-interpreter-cases", - type=str, - help="Write abstract interpreter cases to this file", - default=DEFAULT_ABSTRACT_INTERPRETER_OUTPUT, -) - - -class Generator(Analyzer): - def get_stack_effect_info( - self, thing: parsing.InstDef | 
parsing.Macro | parsing.Pseudo - ) -> tuple[AnyInstruction | None, str, str]: - def effect_str(effects: list[StackEffect]) -> str: - n_effect, sym_effect = list_effect_size(effects) - if sym_effect: - return f"{sym_effect} + {n_effect}" if n_effect else sym_effect - return str(n_effect) - - instr: AnyInstruction | None - popped: str | None = None - pushed: str | None = None - match thing: - case parsing.InstDef(): - instr = self.instrs[thing.name] - popped = effect_str(instr.input_effects) - pushed = effect_str(instr.output_effects) - case parsing.Macro(): - instr = self.macro_instrs[thing.name] - popped, pushed = stacking.get_stack_effect_info_for_macro(instr) - case parsing.Pseudo(): - instr = self.pseudo_instrs[thing.name] - # Calculate stack effect, and check that it's the same - # for all targets. - for target in self.pseudos[thing.name].targets: - target_instr = self.instrs.get(target) - if target_instr is None: - macro_instr = self.macro_instrs[target] - popped, pushed = stacking.get_stack_effect_info_for_macro(macro_instr) - else: - target_popped = effect_str(target_instr.input_effects) - target_pushed = effect_str(target_instr.output_effects) - if popped is None: - popped, pushed = target_popped, target_pushed - else: - assert popped == target_popped - assert pushed == target_pushed - case _: - assert_never(thing) - assert popped is not None and pushed is not None - return instr, popped, pushed - - @contextlib.contextmanager - def metadata_item(self, signature: str, open: str, close: str) -> Iterator[None]: - self.out.emit("") - self.out.emit(f"extern {signature};") - self.out.emit("#ifdef NEED_OPCODE_METADATA") - with self.out.block(f"{signature} {open}", close): - yield - self.out.emit("#endif // NEED_OPCODE_METADATA") - - def write_stack_effect_functions(self) -> None: - popped_data: list[tuple[AnyInstruction, str]] = [] - pushed_data: list[tuple[AnyInstruction, str]] = [] - for thing in self.everything: - if isinstance(thing, parsing.Macro) and thing.name in self.instrs: - continue - instr, popped, pushed = self.get_stack_effect_info(thing) - if instr is not None: - popped_data.append((instr, popped)) - pushed_data.append((instr, pushed)) - - def write_function( - direction: str, data: list[tuple[AnyInstruction, str]] - ) -> None: - with self.metadata_item( - f"int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump)", - "", - "", - ): - with self.out.block("switch(opcode)"): - effects = [(instr.name, effect) for instr, effect in data] - for name, effect in sorted(effects): - self.out.emit(f"case {name}:") - self.out.emit(f" return {effect};") - self.out.emit("default:") - self.out.emit(" return -1;") - - write_function("popped", popped_data) - write_function("pushed", pushed_data) - self.out.emit("") - - def from_source_files(self) -> str: - filenames = [] - for filename in self.input_filenames: - try: - filename = os.path.relpath(filename, ROOT) - except ValueError: - # May happen on Windows if root and temp on different volumes - pass - filenames.append(filename.replace(os.path.sep, posixpath.sep)) - paths = f"\n{self.out.comment} ".join(filenames) - return f"{self.out.comment} from:\n{self.out.comment} {paths}\n" - - def write_provenance_header(self) -> None: - self.out.write_raw(f"{self.out.comment} This file is generated by {THIS}\n") - self.out.write_raw(self.from_source_files()) - self.out.write_raw(f"{self.out.comment} Do not edit!\n") - - def assign_opcode_ids(self) -> None: - """Assign IDs to opcodes""" - - ops: list[tuple[bool, str]] = [] # (has_arg, name) 
for each opcode - instrumented_ops: list[str] = [] - - specialized_ops: set[str] = set() - for name, family in self.families.items(): - specialized_ops.update(family.members) - - for instr in self.macro_instrs.values(): - name = instr.name - if name in specialized_ops: - continue - if name.startswith("INSTRUMENTED_"): - instrumented_ops.append(name) - else: - ops.append((instr.instr_flags.HAS_ARG_FLAG, name)) - - # Special case: this instruction is implemented in ceval.c - # rather than bytecodes.c, so we need to add it explicitly - # here (at least until we add something to bytecodes.c to - # declare external instructions). - instrumented_ops.append("INSTRUMENTED_LINE") - - # assert lists are unique - assert len(set(ops)) == len(ops) - assert len(set(instrumented_ops)) == len(instrumented_ops) - - opname: list[str | None] = [None] * 512 - opmap: dict[str, int] = {} - markers: dict[str, int] = {} - - def map_op(op: int, name: str) -> None: - assert op < len(opname) - assert opname[op] is None, (op, name) - assert name not in opmap - opname[op] = name - opmap[name] = op - - # 0 is reserved for cache entries. This helps debugging. - map_op(0, "CACHE") - - # 17 is reserved as it is the initial value for the specializing counter. - # This helps catch cases where we attempt to execute a cache. - map_op(17, "RESERVED") - - # 149 is RESUME - it is hard coded as such in Tools/build/deepfreeze.py - map_op(149, "RESUME") - - # Specialized ops appear in their own section - # Instrumented opcodes are at the end of the valid range - min_internal = 150 - min_instrumented = 254 - (len(instrumented_ops) - 1) - assert min_internal + len(specialized_ops) < min_instrumented - - next_opcode = 1 - for has_arg, name in sorted(ops): - if name in opmap: - continue # an anchored name, like CACHE - map_op(next_opcode, name) - if has_arg and "HAVE_ARGUMENT" not in markers: - markers["HAVE_ARGUMENT"] = next_opcode - - while opname[next_opcode] is not None: - next_opcode += 1 - - assert next_opcode < min_internal, next_opcode - - for i, op in enumerate(sorted(specialized_ops)): - map_op(min_internal + i, op) - - markers["MIN_INSTRUMENTED_OPCODE"] = min_instrumented - for i, op in enumerate(instrumented_ops): - map_op(min_instrumented + i, op) - - # Pseudo opcodes are after the valid range - for i, op in enumerate(sorted(self.pseudos)): - map_op(256 + i, op) - - assert 255 not in opmap.values() # 255 is reserved - self.opmap = opmap - self.markers = markers - - def write_opcode_targets(self, opcode_targets_filename: str) -> None: - """Write header file that defines the jump target table""" - - with open(opcode_targets_filename, "w") as f: - # Create formatter - self.out = Formatter(f, 0) - - with self.out.block("static void *opcode_targets[256] =", ";"): - targets = ["_unknown_opcode"] * 256 - for name, op in self.opmap.items(): - if op < 256: - targets[op] = f"TARGET_{name}" - f.write(",\n".join([f" &&{s}" for s in targets])) - - def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> None: - """Write instruction metadata to output file.""" - - # Compute the set of all instruction formats. 
- all_formats: set[str] = set() - for thing in self.everything: - format: str | None = None - match thing: - case parsing.InstDef(): - format = self.instrs[thing.name].instr_fmt - case parsing.Macro(): - format = self.macro_instrs[thing.name].instr_fmt - case parsing.Pseudo(): - # Pseudo instructions exist only in the compiler, - # so do not have a format - continue - case _: - assert_never(thing) - assert format is not None - all_formats.add(format) - - # Turn it into a sorted list of enum values. - format_enums = [INSTR_FMT_PREFIX + format for format in sorted(all_formats)] - - with open(metadata_filename, "w") as f: - # Create formatter - self.out = Formatter(f, 0) - - self.write_provenance_header() - - self.out.emit("") - self.out.emit("#ifndef Py_BUILD_CORE") - self.out.emit('# error "this header requires Py_BUILD_CORE define"') - self.out.emit("#endif") - self.out.emit("") - self.out.emit("#include // bool") - - self.write_pseudo_instrs() - - self.out.emit("") - self.out.emit('#include "pycore_uop_ids.h"') - - self.write_stack_effect_functions() - - # Write the enum definition for instruction formats. - with self.out.block("enum InstructionFormat", ";"): - for enum in format_enums: - self.out.emit(enum + ",") - - self.out.emit("") - self.out.emit( - "#define IS_VALID_OPCODE(OP) \\\n" - " (((OP) >= 0) && ((OP) < OPCODE_METADATA_SIZE) && \\\n" - " (_PyOpcode_opcode_metadata[(OP)].valid_entry))" - ) - - self.out.emit("") - InstructionFlags.emit_macros(self.out) - - self.out.emit("") - with self.out.block("struct opcode_metadata", ";"): - self.out.emit("bool valid_entry;") - self.out.emit("enum InstructionFormat instr_format;") - self.out.emit("int flags;") - self.out.emit("") - - with self.out.block("struct opcode_macro_expansion", ";"): - self.out.emit("int nuops;") - self.out.emit( - "struct { int16_t uop; int8_t size; int8_t offset; } uops[12];" - ) - self.out.emit("") - - for key, value in OPARG_SIZES.items(): - self.out.emit(f"#define {key} {value}") - self.out.emit("") - - self.out.emit( - "#define OPCODE_METADATA_FLAGS(OP) " - "(_PyOpcode_opcode_metadata[(OP)].flags & (HAS_ARG_FLAG | HAS_JUMP_FLAG))" - ) - self.out.emit("#define SAME_OPCODE_METADATA(OP1, OP2) \\") - self.out.emit( - " (OPCODE_METADATA_FLAGS(OP1) == OPCODE_METADATA_FLAGS(OP2))" - ) - self.out.emit("") - - # Write metadata array declaration - self.out.emit("#define OPCODE_METADATA_SIZE 512") - self.out.emit("#define OPCODE_UOP_NAME_SIZE 512") - self.out.emit("#define OPCODE_MACRO_EXPANSION_SIZE 256") - - with self.metadata_item( - "const struct opcode_metadata " - "_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE]", - "=", - ";", - ): - # Write metadata for each instruction - sorted_things = sorted(self.everything, key = lambda t:t.name) - for thing in sorted_things: - match thing: - case parsing.InstDef(): - self.write_metadata_for_inst(self.instrs[thing.name]) - case parsing.Macro(): - if thing.name not in self.instrs: - self.write_metadata_for_macro( - self.macro_instrs[thing.name] - ) - case parsing.Pseudo(): - self.write_metadata_for_pseudo( - self.pseudo_instrs[thing.name] - ) - case _: - assert_never(thing) - - with self.metadata_item( - "const struct opcode_macro_expansion " - "_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE]", - "=", - ";", - ): - # Write macro expansion for each non-pseudo instruction - for mac in sorted(self.macro_instrs.values(), key=lambda t: t.name): - if is_super_instruction(mac): - # Special-case the heck out of super-instructions - self.write_super_expansions(mac.name) - else: 
- self.write_macro_expansions( - mac.name, mac.parts, mac.cache_offset - ) - - with self.metadata_item( - "const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE]", "=", ";" - ): - self.write_uop_items(lambda name, counter: f'[{name}] = "{name}",') - - with self.metadata_item( - f"const char *const _PyOpcode_OpName[{1 + max(self.opmap.values())}]", - "=", - ";", - ): - for name in sorted(self.opmap): - self.out.emit(f'[{name}] = "{name}",') - - with self.metadata_item( - f"const uint8_t _PyOpcode_Caches[256]", - "=", - ";", - ): - family_member_names: set[str] = set() - for family in self.families.values(): - family_member_names.update(family.members) - for mac in self.macro_instrs.values(): - if ( - mac.cache_offset > 0 - and mac.name not in family_member_names - and not mac.name.startswith("INSTRUMENTED_") - ): - self.out.emit(f"[{mac.name}] = {mac.cache_offset},") - - deoptcodes = {} - for name, op in self.opmap.items(): - if op < 256: - deoptcodes[name] = name - for name, family in self.families.items(): - for m in family.members: - deoptcodes[m] = name - # special case: - deoptcodes["BINARY_OP_INPLACE_ADD_UNICODE"] = "BINARY_OP" - - with self.metadata_item(f"const uint8_t _PyOpcode_Deopt[256]", "=", ";"): - for opt, deopt in sorted(deoptcodes.items()): - self.out.emit(f"[{opt}] = {deopt},") - - self.out.emit("") - self.out.emit("#define EXTRA_CASES \\") - valid_opcodes = set(self.opmap.values()) - with self.out.indent(): - for op in range(256): - if op not in valid_opcodes: - self.out.emit(f"case {op}: \\") - self.out.emit(" ;\n") - - with open(pymetadata_filename, "w") as f: - # Create formatter - self.out = Formatter(f, 0, comment="#") - - self.write_provenance_header() - - # emit specializations - specialized_ops = set() - - self.out.emit("") - self.out.emit("_specializations = {") - for name, family in self.families.items(): - with self.out.indent(): - self.out.emit(f'"{family.name}": [') - with self.out.indent(): - for m in family.members: - self.out.emit(f'"{m}",') - specialized_ops.update(family.members) - self.out.emit(f"],") - self.out.emit("}") - - # Handle special case - self.out.emit("") - self.out.emit("# An irregular case:") - self.out.emit( - '_specializations["BINARY_OP"].append(' - '"BINARY_OP_INPLACE_ADD_UNICODE")' - ) - specialized_ops.add("BINARY_OP_INPLACE_ADD_UNICODE") - - ops = sorted((id, name) for (name, id) in self.opmap.items()) - # emit specialized opmap - self.out.emit("") - with self.out.block("_specialized_opmap ="): - for op, name in ops: - if name in specialized_ops: - self.out.emit(f"'{name}': {op},") - - # emit opmap - self.out.emit("") - with self.out.block("opmap ="): - for op, name in ops: - if name not in specialized_ops: - self.out.emit(f"'{name}': {op},") - - for name in ["MIN_INSTRUMENTED_OPCODE", "HAVE_ARGUMENT"]: - self.out.emit(f"{name} = {self.markers[name]}") - - def write_pseudo_instrs(self) -> None: - """Write the IS_PSEUDO_INSTR macro""" - self.out.emit("\n\n#define IS_PSEUDO_INSTR(OP) ( \\") - for op in self.pseudos: - self.out.emit(f" ((OP) == {op}) || \\") - self.out.emit(f" 0)") - - def write_uop_items(self, make_text: typing.Callable[[str, int], str]) -> None: - """Write '#define XXX NNN' for each uop""" - counter = 300 # TODO: Avoid collision with pseudo instructions - seen = set() - - def add(name: str) -> None: - if name in seen: - return - nonlocal counter - self.out.emit(make_text(name, counter)) - counter += 1 - seen.add(name) - - # These two are first by convention - add("_EXIT_TRACE") - add("_SET_IP") - - for instr in 
sorted(self.instrs.values(), key=lambda t:t.name): - # Skip ops that are also macros -- those are desugared inst()s - if instr.name not in self.macros: - add(instr.name) - - def write_macro_expansions( - self, name: str, parts: MacroParts, cache_offset: int - ) -> None: - """Write the macro expansions for a macro-instruction.""" - # TODO: Refactor to share code with write_cody(), is_viaible_uop(), etc. - offset = 0 # Cache effect offset - expansions: list[tuple[str, int, int]] = [] # [(name, size, offset), ...] - for part in parts: - if isinstance(part, Component): - # Skip specializations - if "specializing" in part.instr.annotations: - continue - # All other component instructions must be viable uops - if not part.instr.is_viable_uop() and "replaced" not in part.instr.annotations: - # This note just reminds us about macros that cannot - # be expanded to Tier 2 uops. It is not an error. - # Suppress it using 'replaced op(...)' for macros having - # manual translation in translate_bytecode_to_trace() - # in Python/optimizer.c. - if len(parts) > 1 or part.instr.name != name: - self.note( - f"Part {part.instr.name} of {name} is not a viable uop", - part.instr.inst, - ) - return - if not part.active_caches: - if part.instr.name == "_SAVE_RETURN_OFFSET": - size, offset = OPARG_SIZES["OPARG_SAVE_RETURN_OFFSET"], cache_offset - else: - size, offset = OPARG_SIZES["OPARG_FULL"], 0 - else: - # If this assert triggers, is_viable_uops() lied - assert len(part.active_caches) == 1, (name, part.instr.name) - cache = part.active_caches[0] - size, offset = cache.effect.size, cache.offset - expansions.append((part.instr.name, size, offset)) - assert len(expansions) > 0, f"Macro {name} has empty expansion?!" - self.write_expansions(name, expansions) - - def write_super_expansions(self, name: str) -> None: - """Write special macro expansions for super-instructions. - - If you get an assertion failure here, you probably have accidentally - violated one of the assumptions here. - - - A super-instruction's name is of the form FIRST_SECOND where - FIRST and SECOND are regular instructions whose name has the - form FOO_BAR. Thus, there must be exactly 3 underscores. - Example: LOAD_CONST_STORE_FAST. - - - A super-instruction's body uses `oparg1 and `oparg2`, and no - other instruction's body uses those variable names. - - - A super-instruction has no active (used) cache entries. - - In the expansion, the first instruction's operand is all but the - bottom 4 bits of the super-instruction's oparg, and the second - instruction's operand is the bottom 4 bits. We use the special - size codes OPARG_TOP and OPARG_BOTTOM for these. 
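The 4-bit operand split described here is easy to state concretely. A minimal sketch (the helper name and sample value are illustrative, not part of the generator):

```python
# Illustrative sketch of how a super-instruction's combined oparg is
# split between its two component instructions, per the docstring above.

def split_super_oparg(oparg: int) -> tuple[int, int]:
    """Return (oparg1, oparg2) for FIRST_SECOND given the combined oparg."""
    oparg1 = oparg >> 4   # OPARG_TOP: all but the bottom 4 bits
    oparg2 = oparg & 0xF  # OPARG_BOTTOM: the bottom 4 bits
    return oparg1, oparg2

assert split_super_oparg(0x53) == (5, 3)
```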
- """ - pieces = name.split("_") - assert len(pieces) == 4, f"{name} doesn't look like a super-instr" - name1 = "_".join(pieces[:2]) - name2 = "_".join(pieces[2:]) - assert name1 in self.instrs, f"{name1} doesn't match any instr" - assert name2 in self.instrs, f"{name2} doesn't match any instr" - instr1 = self.instrs[name1] - instr2 = self.instrs[name2] - assert not instr1.active_caches, f"{name1} has active caches" - assert not instr2.active_caches, f"{name2} has active caches" - expansions: list[tuple[str, int, int]] = [ - (name1, OPARG_SIZES["OPARG_TOP"], 0), - (name2, OPARG_SIZES["OPARG_BOTTOM"], 0), - ] - self.write_expansions(name, expansions) - - def write_expansions( - self, name: str, expansions: list[tuple[str, int, int]] - ) -> None: - pieces = [ - f"{{ {name}, {size}, {offset} }}" for name, size, offset in expansions - ] - self.out.emit( - f"[{name}] = " - f"{{ .nuops = {len(pieces)}, .uops = {{ {', '.join(pieces)} }} }}," - ) - - def emit_metadata_entry(self, name: str, fmt: str | None, flags: InstructionFlags) -> None: - flag_names = flags.names(value=True) - if not flag_names: - flag_names.append("0") - fmt_macro = "0" if fmt is None else INSTR_FMT_PREFIX + fmt - self.out.emit( - f"[{name}] = {{ true, {fmt_macro}," - f" {' | '.join(flag_names)} }}," - ) - - def write_metadata_for_inst(self, instr: Instruction) -> None: - """Write metadata for a single instruction.""" - self.emit_metadata_entry(instr.name, instr.instr_fmt, instr.instr_flags) - - def write_metadata_for_macro(self, mac: MacroInstruction) -> None: - """Write metadata for a macro-instruction.""" - self.emit_metadata_entry(mac.name, mac.instr_fmt, mac.instr_flags) - - def write_metadata_for_pseudo(self, ps: PseudoInstruction) -> None: - """Write metadata for a macro-instruction.""" - self.emit_metadata_entry(ps.name, None, ps.instr_flags) - - def write_instructions( - self, output_filename: str, emit_line_directives: bool - ) -> None: - """Write instructions to output file.""" - with open(output_filename, "w") as f: - # Create formatter - self.out = Formatter(f, 8, emit_line_directives) - - self.write_provenance_header() - - self.out.write_raw("\n") - self.out.write_raw("#ifdef TIER_TWO\n") - self.out.write_raw(" #error \"This file is for Tier 1 only\"\n") - self.out.write_raw("#endif\n") - self.out.write_raw("#define TIER_ONE 1\n") - - # Write and count instructions of all kinds - n_macros = 0 - cases = [] - for thing in self.everything: - match thing: - case parsing.InstDef(): - pass - case parsing.Macro(): - n_macros += 1 - mac = self.macro_instrs[thing.name] - cases.append((mac.name, mac)) - case parsing.Pseudo(): - pass - case _: - assert_never(thing) - cases.sort() - for _, mac in cases: - stacking.write_macro_instr(mac, self.out) - - self.out.write_raw("\n") - self.out.write_raw("#undef TIER_ONE\n") - - print( - f"Wrote {n_macros} cases to {output_filename}", - file=sys.stderr, - ) - - def write_executor_instructions( - self, executor_filename: str, emit_line_directives: bool - ) -> None: - """Generate cases for the Tier 2 interpreter.""" - n_uops = 0 - with open(executor_filename, "w") as f: - self.out = Formatter(f, 8, emit_line_directives) - self.write_provenance_header() - - self.out.write_raw("\n") - self.out.write_raw("#ifdef TIER_ONE\n") - self.out.write_raw(" #error \"This file is for Tier 2 only\"\n") - self.out.write_raw("#endif\n") - self.out.write_raw("#define TIER_TWO 2\n") - - for instr in self.instrs.values(): - if instr.is_viable_uop(): - n_uops += 1 - self.out.emit("") - with 
self.out.block(f"case {instr.name}:"): - if instr.instr_flags.HAS_ARG_FLAG: - self.out.emit("oparg = CURRENT_OPARG();") - stacking.write_single_instr(instr, self.out, tier=TIER_TWO) - if instr.check_eval_breaker: - self.out.emit("CHECK_EVAL_BREAKER();") - self.out.emit("break;") - - self.out.write_raw("\n") - self.out.write_raw("#undef TIER_TWO\n") - - print( - f"Wrote {n_uops} cases to {executor_filename}", - file=sys.stderr, - ) - - def write_abstract_interpreter_instructions( - self, abstract_interpreter_filename: str, emit_line_directives: bool - ) -> None: - """Generate cases for the Tier 2 abstract interpreter/analzyer.""" - with open(abstract_interpreter_filename, "w") as f: - self.out = Formatter(f, 8, emit_line_directives) - self.write_provenance_header() - for instr in self.instrs.values(): - instr = AbstractInstruction(instr.inst) - if ( - instr.is_viable_uop() - and instr.name not in SPECIALLY_HANDLED_ABSTRACT_INSTR - ): - self.out.emit("") - with self.out.block(f"case {instr.name}:"): - instr.write(self.out, tier=TIER_TWO) - self.out.emit("break;") - print( - f"Wrote some stuff to {abstract_interpreter_filename}", - file=sys.stderr, - ) - - -def is_super_instruction(mac: MacroInstruction) -> bool: - if ( - len(mac.parts) == 1 - and isinstance(mac.parts[0], Component) - and variable_used(mac.parts[0].instr.inst, "oparg1") - ): - assert variable_used(mac.parts[0].instr.inst, "oparg2") - return True - else: - return False - - -def main() -> None: - """Parse command line, parse input, analyze, write output.""" - args = arg_parser.parse_args() # Prints message and sys.exit(2) on error - if len(args.input) == 0: - args.input.append(DEFAULT_INPUT) - - # Raises OSError if input unreadable - a = Generator(args.input) - - a.parse() # Raises SyntaxError on failure - a.analyze() # Prints messages and sets a.errors on failure - if a.errors: - sys.exit(f"Found {a.errors} errors") - if args.viable: - # Load execution counts from bmraw.json, if it exists - a.report_non_viable_uops("bmraw.json") - return - - # These raise OSError if output can't be written - - a.assign_opcode_ids() - a.write_opcode_targets(args.opcode_targets_h) - a.write_metadata(args.metadata, args.pymetadata) - a.write_abstract_interpreter_instructions( - args.abstract_interpreter_cases, args.emit_line_directives - ) - - -if __name__ == "__main__": - main() diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 1b565bff2c56f6..5a42a05c5c2ef2 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -2,14 +2,11 @@ from typing import TextIO from analyzer import ( - Analysis, Instruction, Uop, - Part, analyze_files, + Properties, Skip, - StackItem, - analysis_error, ) from cwriter import CWriter from typing import Callable, Mapping, TextIO, Iterator @@ -25,14 +22,16 @@ def root_relative_path(filename: str) -> str: try: return Path(filename).absolute().relative_to(ROOT).as_posix() except ValueError: + # Not relative to root, just return original path. return filename -def write_header(generator: str, sources: list[str], outfile: TextIO) -> None: + +def write_header(generator: str, sources: list[str], outfile: TextIO, comment: str = "//") -> None: outfile.write( - f"""// This file is generated by {root_relative_path(generator)} -// from: -// {", ".join(root_relative_path(src) for src in sources)} -// Do not edit! 
+ f"""{comment} This file is generated by {root_relative_path(generator)} +{comment} from: +{comment} {", ".join(root_relative_path(src) for src in sources)} +{comment} Do not edit! """ ) @@ -186,3 +185,31 @@ def emit_tokens( replacement_functions[tkn.text](out, tkn, tkn_iter, uop, stack, inst) else: out.emit(tkn) + + +def cflags(p: Properties) -> str: + flags: list[str] = [] + if p.oparg: + flags.append("HAS_ARG_FLAG") + if p.uses_co_consts: + flags.append("HAS_CONST_FLAG") + if p.uses_co_names: + flags.append("HAS_NAME_FLAG") + if p.jumps: + flags.append("HAS_JUMP_FLAG") + if p.has_free: + flags.append("HAS_FREE_FLAG") + if p.uses_locals: + flags.append("HAS_LOCAL_FLAG") + if p.eval_breaker: + flags.append("HAS_EVAL_BREAK_FLAG") + if p.deopts: + flags.append("HAS_DEOPT_FLAG") + if not p.infallible: + flags.append("HAS_ERROR_FLAG") + if p.escapes: + flags.append("HAS_ESCAPES_FLAG") + if flags: + return " | ".join(flags) + else: + return "0" diff --git a/Tools/cases_generator/instructions.py b/Tools/cases_generator/instructions.py deleted file mode 100644 index 149a08810e4ae5..00000000000000 --- a/Tools/cases_generator/instructions.py +++ /dev/null @@ -1,355 +0,0 @@ -import dataclasses -import re -import typing - -from flags import InstructionFlags, variable_used, variable_used_unspecialized -from formatting import ( - Formatter, - UNUSED, - list_effect_size, -) -import lexer as lx -import parsing -from parsing import StackEffect -import stacking - -BITS_PER_CODE_UNIT = 16 - - -@dataclasses.dataclass -class ActiveCacheEffect: - """Wraps a CacheEffect that is actually used, in context.""" - - effect: parsing.CacheEffect - offset: int - - -FORBIDDEN_NAMES_IN_UOPS = ( - "next_instr", - "oparg1", # Proxy for super-instructions like LOAD_FAST_LOAD_FAST - "JUMPBY", - "DISPATCH", - "TIER_ONE_ONLY", -) - - -# Interpreter tiers -TIER_ONE: typing.Final = 1 # Specializing adaptive interpreter (PEP 659) -TIER_TWO: typing.Final = 2 # Experimental tracing interpreter -Tiers: typing.TypeAlias = typing.Literal[1, 2] - - -@dataclasses.dataclass -class Instruction: - """An instruction with additional data and code.""" - - # Parts of the underlying instruction definition - inst: parsing.InstDef - name: str - annotations: list[str] - block: parsing.Block - block_text: list[str] # Block.text, less curlies, less PREDICT() calls - block_line: int # First line of block in original code - - # Computed by constructor - always_exits: str # If the block always exits, its last line; else "" - has_deopt: bool - needs_this_instr: bool - cache_offset: int - cache_effects: list[parsing.CacheEffect] - input_effects: list[StackEffect] - output_effects: list[StackEffect] - unmoved_names: frozenset[str] - instr_fmt: str - instr_flags: InstructionFlags - active_caches: list[ActiveCacheEffect] - - # Set later - family: parsing.Family | None = None - predicted: bool = False - - def __init__(self, inst: parsing.InstDef): - self.inst = inst - self.name = inst.name - self.annotations = inst.annotations - self.block = inst.block - self.block_text, self.check_eval_breaker, self.block_line = extract_block_text( - self.block - ) - self.always_exits = always_exits(self.block_text) - self.has_deopt = variable_used(self.inst, "DEOPT_IF") - self.cache_effects = [ - effect for effect in inst.inputs if isinstance(effect, parsing.CacheEffect) - ] - self.cache_offset = sum(c.size for c in self.cache_effects) - self.needs_this_instr = variable_used(self.inst, "this_instr") or any(c.name != UNUSED for c in self.cache_effects) - 
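A minimal sketch of the cache-offset computation just above, using a hypothetical stand-in for the parser's CacheEffect class (the sample layout is invented for illustration):

```python
from dataclasses import dataclass

@dataclass
class CacheEffect:
    name: str
    size: int  # measured in 16-bit code units

# A made-up cache layout: a 1-unit counter followed by a 4-unit version.
cache_effects = [CacheEffect("counter", 1), CacheEffect("version", 4)]

# cache_offset is simply the sum of the per-effect sizes.
cache_offset = sum(c.size for c in cache_effects)
assert cache_offset == 5  # total cache code units following the opcode
```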
self.input_effects = [ - effect for effect in inst.inputs if isinstance(effect, StackEffect) - ] - self.output_effects = inst.outputs # For consistency/completeness - unmoved_names: set[str] = set() - for ieffect, oeffect in zip(self.input_effects, self.output_effects): - if ieffect == oeffect and ieffect.name == oeffect.name: - unmoved_names.add(ieffect.name) - else: - break - self.unmoved_names = frozenset(unmoved_names) - - self.instr_flags = InstructionFlags.fromInstruction(inst) - - self.active_caches = [] - offset = 0 - for effect in self.cache_effects: - if effect.name != UNUSED: - self.active_caches.append(ActiveCacheEffect(effect, offset)) - offset += effect.size - - if self.instr_flags.HAS_ARG_FLAG: - fmt = "IB" - else: - fmt = "IX" - if offset: - fmt += "C" + "0" * (offset - 1) - self.instr_fmt = fmt - - def is_viable_uop(self) -> bool: - """Whether this instruction is viable as a uop.""" - dprint: typing.Callable[..., None] = lambda *args, **kwargs: None - if "FRAME" in self.name: - dprint = print - - if self.name == "_EXIT_TRACE": - return True # This has 'return frame' but it's okay - if self.name == "_SAVE_RETURN_OFFSET": - return True # Adjusts next_instr, but only in tier 1 code - if self.always_exits: - dprint(f"Skipping {self.name} because it always exits: {self.always_exits}") - return False - if len(self.active_caches) > 1: - # print(f"Skipping {self.name} because it has >1 cache entries") - return False - res = True - for forbidden in FORBIDDEN_NAMES_IN_UOPS: - # NOTE: To disallow unspecialized uops, use - # if variable_used(self.inst, forbidden): - if variable_used_unspecialized(self.inst, forbidden): - dprint(f"Skipping {self.name} because it uses {forbidden}") - res = False - return res - - def write_body( - self, - out: Formatter, - dedent: int, - active_caches: list[ActiveCacheEffect], - tier: Tiers, - family: parsing.Family | None, - ) -> None: - """Write the instruction body.""" - # Write cache effect variable declarations and initializations - for active in active_caches: - ceffect = active.effect - bits = ceffect.size * BITS_PER_CODE_UNIT - if bits == 64: - # NOTE: We assume that 64-bit data in the cache - # is always an object pointer. - # If this becomes false, we need a way to specify - # syntactically what type the cache data is. - typ = "PyObject *" - func = "read_obj" - else: - typ = f"uint{bits}_t " - func = f"read_u{bits}" - if tier == TIER_ONE: - out.emit( - f"{typ}{ceffect.name} = " - f"{func}(&this_instr[{active.offset + 1}].cache);" - ) - else: - out.emit(f"{typ}{ceffect.name} = ({typ.strip()})CURRENT_OPERAND();") - - # Write the body, substituting a goto for ERROR_IF() and other stuff - assert dedent <= 0 - extra = " " * -dedent - names_to_skip = self.unmoved_names | frozenset({UNUSED, "null"}) - offset = 0 - context = self.block.context - assert context is not None and context.owner is not None - filename = context.owner.filename - for line in self.block_text: - out.set_lineno(self.block_line + offset, filename) - offset += 1 - if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*(?://.*)?$", line): - space, cond, label = m.groups() - space = extra + space - # ERROR_IF() must pop the inputs from the stack. - # The code block is responsible for DECREF()ing them. - # NOTE: If the label doesn't exist, just add it to ceval.c. - - # Don't pop common input/output effects at the bottom! - # These aren't DECREF'ed so they can stay. 
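The ERROR_IF() rewrite implemented in the lines that follow can be sketched in isolation. The regex and label scheme below are taken from this file; the sample source line and input count are made up:

```python
import re

# A made-up instruction body line of the kind write_body() rewrites.
line = "    ERROR_IF(res == NULL, error);"
m = re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*(?://.*)?$", line)
assert m is not None
space, cond, label = m.groups()

ninputs = 2  # pretend two input effects still need to be popped
if ninputs:
    label = f"pop_{ninputs}_{label}"  # jump to a label that pops them

print(f"{space}if ({cond}) goto {label};")
# -> "    if (res == NULL) goto pop_2_error;"
```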
- ieffs = list(self.input_effects) - oeffs = list(self.output_effects) - while ( - ieffs - and oeffs - and ieffs[0] == oeffs[0] - and ieffs[0].name == oeffs[0].name - ): - ieffs.pop(0) - oeffs.pop(0) - ninputs, symbolic = list_effect_size(ieffs) - if ninputs: - label = f"pop_{ninputs}_{label}" - if tier == TIER_TWO: - label = label + "_tier_two" - if symbolic: - out.write_raw( - f"{space}if ({cond}) {{ STACK_SHRINK({symbolic}); goto {label}; }}\n" - ) - else: - out.write_raw(f"{space}if ({cond}) goto {label};\n") - elif m := re.match(r"(\s*)DEOPT_IF\((.+)\);\s*(?://.*)?$", line): - space, cond = m.groups() - space = extra + space - target = family.name if family else self.name - out.write_raw(f"{space}DEOPT_IF({cond}, {target});\n") - elif "DEOPT" in line: - filename = context.owner.filename - lineno = context.owner.tokens[context.begin].line - print(f"{filename}:{lineno}: ERROR: DEOPT_IF() must be all on one line") - out.write_raw(extra + line) - elif m := re.match(r"(\s*)DECREF_INPUTS\(\);\s*(?://.*)?$", line): - out.reset_lineno() - space = extra + m.group(1) - for ieff in self.input_effects: - if ieff.name in names_to_skip: - continue - if ieff.size: - out.write_raw( - f"{space}for (int _i = {ieff.size}; --_i >= 0;) {{\n" - ) - out.write_raw(f"{space} Py_DECREF({ieff.name}[_i]);\n") - out.write_raw(f"{space}}}\n") - else: - decref = "XDECREF" if ieff.cond else "DECREF" - out.write_raw(f"{space}Py_{decref}({ieff.name});\n") - else: - out.write_raw(extra + line) - out.reset_lineno() - - -InstructionOrCacheEffect = Instruction | parsing.CacheEffect - - -# Instruction used for abstract interpretation. -class AbstractInstruction(Instruction): - def __init__(self, inst: parsing.InstDef): - super().__init__(inst) - - def write(self, out: Formatter, tier: Tiers = TIER_ONE) -> None: - """Write one abstract instruction, sans prologue and epilogue.""" - stacking.write_single_instr_for_abstract_interp(self, out) - - def write_body( - self, - out: Formatter, - dedent: int, - active_caches: list[ActiveCacheEffect], - tier: Tiers, - family: parsing.Family | None, - ) -> None: - pass - - -@dataclasses.dataclass -class Component: - instr: Instruction - active_caches: list[ActiveCacheEffect] - - -MacroParts = list[Component | parsing.CacheEffect] - - -@dataclasses.dataclass -class MacroInstruction: - """A macro instruction.""" - - name: str - instr_fmt: str - instr_flags: InstructionFlags - macro: parsing.Macro - parts: MacroParts - cache_offset: int - # Set later - predicted: bool = False - family: parsing.Family | None = None - - -@dataclasses.dataclass -class PseudoInstruction: - """A pseudo instruction.""" - - name: str - targets: list[Instruction | MacroInstruction] - instr_flags: InstructionFlags - - -AnyInstruction = Instruction | MacroInstruction | PseudoInstruction - - -def extract_block_text(block: parsing.Block) -> tuple[list[str], bool, int]: - # Get lines of text with proper dedent - blocklines = block.text.splitlines(True) - first_token: lx.Token = block.tokens[0] # IndexError means the context is broken - block_line = first_token.begin[0] - - # Remove blank lines from both ends - while blocklines and not blocklines[0].strip(): - blocklines.pop(0) - block_line += 1 - while blocklines and not blocklines[-1].strip(): - blocklines.pop() - - # Remove leading and trailing braces - assert blocklines and blocklines[0].strip() == "{" - assert blocklines and blocklines[-1].strip() == "}" - blocklines.pop() - blocklines.pop(0) - block_line += 1 - - # Remove trailing blank lines - while blocklines 
and not blocklines[-1].strip(): - blocklines.pop() - - # Separate CHECK_EVAL_BREAKER() macro from end - check_eval_breaker = ( - blocklines != [] and blocklines[-1].strip() == "CHECK_EVAL_BREAKER();" - ) - if check_eval_breaker: - del blocklines[-1] - - return blocklines, check_eval_breaker, block_line - - -def always_exits(lines: list[str]) -> str: - """Determine whether a block always ends in a return/goto/etc.""" - if not lines: - return "" - line = lines[-1].rstrip() - # Indent must match exactly (TODO: Do something better) - if line[:12] != " " * 12: - return "" - line = line[12:] - if line.startswith( - ( - "goto ", - "return ", - "DISPATCH", - "GO_TO_", - "Py_UNREACHABLE()", - "ERROR_IF(true, ", - ) - ): - return line - return "" diff --git a/Tools/cases_generator/opcode_id_generator.py b/Tools/cases_generator/opcode_id_generator.py index ddbb409bbced39..dbea3d0b622c87 100644 --- a/Tools/cases_generator/opcode_id_generator.py +++ b/Tools/cases_generator/opcode_id_generator.py @@ -24,111 +24,23 @@ DEFAULT_OUTPUT = ROOT / "Include/opcode_ids.h" -def generate_opcode_header(filenames: list[str], analysis: Analysis, outfile: TextIO) -> None: +def generate_opcode_header( + filenames: list[str], analysis: Analysis, outfile: TextIO +) -> None: write_header(__file__, filenames, outfile) out = CWriter(outfile, 0, False) - out.emit("\n") - instmap: dict[str, int] = {} + with out.header_guard("Py_OPCODE_IDS_H"): + out.emit("/* Instruction opcodes for compiled code */\n") - # 0 is reserved for cache entries. This helps debugging. - instmap["CACHE"] = 0 + def write_define(name: str, op: int) -> None: + out.emit(f"#define {name:<38} {op:>3}\n") - # 17 is reserved as it is the initial value for the specializing counter. - # This helps catch cases where we attempt to execute a cache. - instmap["RESERVED"] = 17 + for op, name in sorted([(op, name) for (name, op) in analysis.opmap.items()]): + write_define(name, op) - # 149 is RESUME - it is hard coded as such in Tools/build/deepfreeze.py - instmap["RESUME"] = 149 - instmap["INSTRUMENTED_LINE"] = 254 - - instrumented = [ - name for name in analysis.instructions if name.startswith("INSTRUMENTED") - ] - - # Special case: this instruction is implemented in ceval.c - # rather than bytecodes.c, so we need to add it explicitly - # here (at least until we add something to bytecodes.c to - # declare external instructions). 
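The deleted ID-allocation logic above is compact enough to demonstrate standalone. A toy version with a few pinned IDs (the instruction names are examples):

```python
# Pinned IDs, as in the deleted code: CACHE=0, RESERVED=17, RESUME=149.
instmap: dict[str, int] = {"CACHE": 0, "RESERVED": 17, "RESUME": 149}
next_opcode = 1

def add_instruction(name: str) -> None:
    global next_opcode
    if name in instmap:
        return  # a pre-defined name keeps its pinned ID
    while next_opcode in instmap.values():
        next_opcode += 1  # skip over pinned IDs
    instmap[name] = next_opcode
    next_opcode += 1

for name in ["NOP", "CACHE", "POP_TOP"]:
    add_instruction(name)
assert instmap["NOP"] == 1 and instmap["POP_TOP"] == 2
```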
-    instrumented.append("INSTRUMENTED_LINE")
-
-    specialized: set[str] = set()
-    no_arg: list[str] = []
-    has_arg: list[str] = []
-
-    for family in analysis.families.values():
-        specialized.update(inst.name for inst in family.members)
-
-    for inst in analysis.instructions.values():
-        name = inst.name
-        if name in specialized:
-            continue
-        if name in instrumented:
-            continue
-        if inst.properties.oparg:
-            has_arg.append(name)
-        else:
-            no_arg.append(name)
-
-    # Specialized ops appear in their own section
-    # Instrumented opcodes are at the end of the valid range
-    min_internal = 150
-    min_instrumented = 254 - (len(instrumented) - 1)
-    assert min_internal + len(specialized) < min_instrumented
-
-    next_opcode = 1
-
-    def add_instruction(name: str) -> None:
-        nonlocal next_opcode
-        if name in instmap:
-            return  # Pre-defined name
-        while next_opcode in instmap.values():
-            next_opcode += 1
-        instmap[name] = next_opcode
-        next_opcode += 1
-
-    for name in sorted(no_arg):
-        add_instruction(name)
-    for name in sorted(has_arg):
-        add_instruction(name)
-    # For compatibility
-    next_opcode = min_internal
-    for name in sorted(specialized):
-        add_instruction(name)
-    next_opcode = min_instrumented
-    for name in instrumented:
-        add_instruction(name)
-
-    for op, name in enumerate(sorted(analysis.pseudos), 256):
-        instmap[name] = op
-
-    assert 255 not in instmap.values()
-
-    out.emit(
-        """#ifndef Py_OPCODE_IDS_H
-#define Py_OPCODE_IDS_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/* Instruction opcodes for compiled code */
-"""
-    )
-
-    def write_define(name: str, op: int) -> None:
-        out.emit(f"#define {name:<38} {op:>3}\n")
-
-    for op, name in sorted([(op, name) for (name, op) in instmap.items()]):
-        write_define(name, op)
-
-    out.emit("\n")
-    write_define("HAVE_ARGUMENT", len(no_arg))
-    write_define("MIN_INSTRUMENTED_OPCODE", min_instrumented)
-
-    out.emit("\n")
-    out.emit("#ifdef __cplusplus\n")
-    out.emit("}\n")
-    out.emit("#endif\n")
-    out.emit("#endif /* !Py_OPCODE_IDS_H */\n")
+        out.emit("\n")
+        write_define("HAVE_ARGUMENT", analysis.have_arg)
+        write_define("MIN_INSTRUMENTED_OPCODE", analysis.min_instrumented)
 
 
 arg_parser = argparse.ArgumentParser(
diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py
new file mode 100644
index 00000000000000..9b7df9a54c7b3b
--- /dev/null
+++ b/Tools/cases_generator/opcode_metadata_generator.py
@@ -0,0 +1,386 @@
+"""Generate opcode metadata.
+Reads the instruction definitions from bytecodes.c.
+Writes the metadata to pycore_opcode_metadata.h by default.
+"""
+
+import argparse
+import os.path
+import sys
+
+from analyzer import (
+    Analysis,
+    Instruction,
+    analyze_files,
+    Skip,
+    Uop,
+)
+from generators_common import (
+    DEFAULT_INPUT,
+    ROOT,
+    write_header,
+    cflags,
+    StackOffset,
+)
+from cwriter import CWriter
+from typing import TextIO
+from stack import get_stack_effect
+
+# Constants used instead of size for macro expansions.
+# Note: 1, 2, 4 must match actual cache entry sizes.
+OPARG_KINDS = {
+    "OPARG_FULL": 0,
+    "OPARG_CACHE_1": 1,
+    "OPARG_CACHE_2": 2,
+    "OPARG_CACHE_4": 4,
+    "OPARG_TOP": 5,
+    "OPARG_BOTTOM": 6,
+    "OPARG_SAVE_RETURN_OFFSET": 7,
+    # Skip 8 as the other powers of 2 are sizes
+    "OPARG_REPLACED": 9,
+}
+
+FLAGS = [
+    "ARG",
+    "CONST",
+    "NAME",
+    "JUMP",
+    "FREE",
+    "LOCAL",
+    "EVAL_BREAK",
+    "DEOPT",
+    "ERROR",
+    "ESCAPES",
+]
+
+
+def generate_flag_macros(out: CWriter) -> None:
+    for i, flag in enumerate(FLAGS):
+        out.emit(f"#define HAS_{flag}_FLAG ({1<<i})\n")
+    out.emit("\n")
+
+
+def generate_oparg_macros(out: CWriter) -> None:
+    for name, value in OPARG_KINDS.items():
+        out.emit(f"#define {name} {value}\n")
+    out.emit("\n")
+
+
+def emit_stack_effect_function(
+    out: CWriter, direction: str, data: list[tuple[str, str]]
+) -> None:
+    out.emit(f"extern int _PyOpcode_num_{direction}(int opcode, int oparg);\n")
+    out.emit("#ifdef NEED_OPCODE_METADATA\n")
+    out.emit(f"int _PyOpcode_num_{direction}(int opcode, int oparg) {{\n")
+    out.emit("switch(opcode) {\n")
+    for name, effect in data:
+        out.emit(f"case {name}:\n")
+        out.emit(f"    return {effect};\n")
+    out.emit("default:\n")
+    out.emit("    return -1;\n")
+    out.emit("}\n")
+    out.emit("}\n\n")
+    out.emit("#endif\n\n")
+
+
+def generate_stack_effect_functions(analysis: Analysis, out: CWriter) -> None:
+    popped_data: list[tuple[str, str]] = []
+    pushed_data: list[tuple[str, str]] = []
+    for inst in analysis.instructions.values():
+        stack = get_stack_effect(inst)
+        popped = (-stack.base_offset).to_c()
+        pushed = (stack.top_offset - stack.base_offset).to_c()
+        popped_data.append((inst.name, popped))
+        pushed_data.append((inst.name, pushed))
+    emit_stack_effect_function(out, "popped", sorted(popped_data))
+    emit_stack_effect_function(out, "pushed", sorted(pushed_data))
+
+
+def generate_is_pseudo(analysis: Analysis, out: CWriter) -> None:
+    """Write the IS_PSEUDO_INSTR macro"""
+    out.emit("\n\n#define IS_PSEUDO_INSTR(OP) ( \\\n")
+    for op in analysis.pseudos:
+        out.emit(f"((OP) == {op}) || \\\n")
+    out.emit("0")
+    out.emit(")\n\n")
+
+
+def get_format(inst: Instruction) -> str:
+    if inst.properties.oparg:
+        format = "INSTR_FMT_IB"
+    else:
+        format = "INSTR_FMT_IX"
+    if inst.size > 1:
+        format += "C"
+    format += "0" * (inst.size - 2)
+    return format
+
+
+def generate_instruction_formats(analysis: Analysis, out: CWriter) -> None:
+    # Compute the set of all instruction formats.
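A standalone rendition of get_format() above, assuming size counts the instruction word plus its cache code units:

```python
# Format names encode whether the instruction uses its oparg (IB vs IX)
# and how many cache code units follow ("C" plus zero padding).
def fmt(has_oparg: bool, size: int) -> str:
    format = "INSTR_FMT_IB" if has_oparg else "INSTR_FMT_IX"
    if size > 1:
        format += "C"
    format += "0" * (size - 2)  # negative multiplier yields "" for size 1
    return format

assert fmt(False, 1) == "INSTR_FMT_IX"
assert fmt(True, 1) == "INSTR_FMT_IB"
assert fmt(True, 2) == "INSTR_FMT_IBC"
assert fmt(True, 4) == "INSTR_FMT_IBC00"
```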
+ formats: set[str] = set() + for inst in analysis.instructions.values(): + formats.add(get_format(inst)) + # Generate an enum for it + out.emit("enum InstructionFormat {\n") + next_id = 1 + for format in sorted(formats): + out.emit(f"{format} = {next_id},\n") + next_id += 1 + out.emit("};\n\n") + + +def generate_deopt_table(analysis: Analysis, out: CWriter) -> None: + out.emit("extern const uint8_t _PyOpcode_Deopt[256];\n") + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit("const uint8_t _PyOpcode_Deopt[256] = {\n") + deopts: list[tuple[str, str]] = [] + for inst in analysis.instructions.values(): + deopt = inst.name + if inst.family is not None: + deopt = inst.family.name + deopts.append((inst.name, deopt)) + deopts.append(("INSTRUMENTED_LINE", "INSTRUMENTED_LINE")) + for name, deopt in sorted(deopts): + out.emit(f"[{name}] = {deopt},\n") + out.emit("};\n\n") + out.emit("#endif // NEED_OPCODE_METADATA\n\n") + + +def generate_cache_table(analysis: Analysis, out: CWriter) -> None: + out.emit("extern const uint8_t _PyOpcode_Caches[256];\n") + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit("const uint8_t _PyOpcode_Caches[256] = {\n") + for inst in analysis.instructions.values(): + if inst.family and inst.family.name != inst.name: + continue + if inst.name.startswith("INSTRUMENTED"): + continue + if inst.size > 1: + out.emit(f"[{inst.name}] = {inst.size-1},\n") + out.emit("};\n") + out.emit("#endif\n\n") + + +def generate_name_table(analysis: Analysis, out: CWriter) -> None: + table_size = 256 + len(analysis.pseudos) + out.emit(f"extern const char *_PyOpcode_OpName[{table_size}];\n") + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit(f"const char *_PyOpcode_OpName[{table_size}] = {{\n") + names = list(analysis.instructions) + list(analysis.pseudos) + names.append("INSTRUMENTED_LINE") + for name in sorted(names): + out.emit(f'[{name}] = "{name}",\n') + out.emit("};\n") + out.emit("#endif\n\n") + + +def generate_metadata_table(analysis: Analysis, out: CWriter) -> None: + table_size = 256 + len(analysis.pseudos) + out.emit("struct opcode_metadata {\n") + out.emit("uint8_t valid_entry;\n") + out.emit("int8_t instr_format;\n") + out.emit("int16_t flags;\n") + out.emit("};\n\n") + out.emit( + f"extern const struct opcode_metadata _PyOpcode_opcode_metadata[{table_size}];\n" + ) + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit( + f"const struct opcode_metadata _PyOpcode_opcode_metadata[{table_size}] = {{\n" + ) + for inst in sorted(analysis.instructions.values(), key=lambda t: t.name): + out.emit( + f"[{inst.name}] = {{ true, {get_format(inst)}, {cflags(inst.properties)} }},\n" + ) + for pseudo in sorted(analysis.pseudos.values(), key=lambda t: t.name): + flags = cflags(pseudo.properties) + for flag in pseudo.flags: + if flags == "0": + flags = f"{flag}_FLAG" + else: + flags += f" | {flag}_FLAG" + out.emit(f"[{pseudo.name}] = {{ true, -1, {flags} }},\n") + out.emit("};\n") + out.emit("#endif\n\n") + + +def generate_expansion_table(analysis: Analysis, out: CWriter) -> None: + expansions_table: dict[str, list[tuple[str, int, int]]] = {} + for inst in sorted(analysis.instructions.values(), key=lambda t: t.name): + offset: int = 0 # Cache effect offset + expansions: list[tuple[str, int, int]] = [] # [(name, size, offset), ...] 
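For orientation, a hedged sketch of what one row of the expansion table encodes; the macro name and the (uop, size, offset) triples below are invented for illustration, not taken from bytecodes.c:

```python
# Each macro instruction maps to a list of (uop name, oparg kind or
# cache size, cache offset) triples, emitted as C designated initializers.
expansion = {
    "LOAD_ATTR_SLOT": [
        ("_GUARD_TYPE_VERSION", 2, 1),  # 2-unit cache read at offset 1
        ("_LOAD_ATTR_SLOT", 1, 3),      # 1-unit cache read at offset 3
    ],
}
for name, rows in expansion.items():
    uops = ", ".join(f"{{ {u}, {s}, {o} }}" for u, s, o in rows)
    print(f"[{name}] = {{ .nuops = {len(rows)}, .uops = {{ {uops} }} }},")
```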
+ if inst.is_super(): + pieces = inst.name.split("_") + assert len(pieces) == 4, f"{inst.name} doesn't look like a super-instr" + name1 = "_".join(pieces[:2]) + name2 = "_".join(pieces[2:]) + assert name1 in analysis.instructions, f"{name1} doesn't match any instr" + assert name2 in analysis.instructions, f"{name2} doesn't match any instr" + instr1 = analysis.instructions[name1] + instr2 = analysis.instructions[name2] + assert ( + len(instr1.parts) == 1 + ), f"{name1} is not a good superinstruction part" + assert ( + len(instr2.parts) == 1 + ), f"{name2} is not a good superinstruction part" + expansions.append((instr1.parts[0].name, OPARG_KINDS["OPARG_TOP"], 0)) + expansions.append((instr2.parts[0].name, OPARG_KINDS["OPARG_BOTTOM"], 0)) + elif not is_viable_expansion(inst): + continue + else: + for part in inst.parts: + size = part.size + if part.name == "_SAVE_RETURN_OFFSET": + size = OPARG_KINDS["OPARG_SAVE_RETURN_OFFSET"] + if isinstance(part, Uop): + # Skip specializations + if "specializing" in part.annotations: + continue + if "replaced" in part.annotations: + size = OPARG_KINDS["OPARG_REPLACED"] + expansions.append((part.name, size, offset if size else 0)) + offset += part.size + expansions_table[inst.name] = expansions + max_uops = max(len(ex) for ex in expansions_table.values()) + out.emit(f"#define MAX_UOP_PER_EXPANSION {max_uops}\n") + out.emit("struct opcode_macro_expansion {\n") + out.emit("int nuops;\n") + out.emit( + "struct { int16_t uop; int8_t size; int8_t offset; } uops[MAX_UOP_PER_EXPANSION];\n" + ) + out.emit("};\n") + out.emit( + "extern const struct opcode_macro_expansion _PyOpcode_macro_expansion[256];\n\n" + ) + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit("const struct opcode_macro_expansion\n") + out.emit("_PyOpcode_macro_expansion[256] = {\n") + for inst_name, expansions in expansions_table.items(): + uops = [ + f"{{ {name}, {size}, {offset} }}" for (name, size, offset) in expansions + ] + out.emit( + f'[{inst_name}] = {{ .nuops = {len(expansions)}, .uops = {{ {", ".join(uops)} }} }},\n' + ) + out.emit("};\n") + out.emit("#endif // NEED_OPCODE_METADATA\n\n") + + +def is_viable_expansion(inst: Instruction) -> bool: + "An instruction can be expanded if all its parts are viable for tier 2" + for part in inst.parts: + if isinstance(part, Uop): + # Skip specializing and replaced uops + if "specializing" in part.annotations: + continue + if "replaced" in part.annotations: + continue + if part.properties.tier_one_only or not part.is_viable(): + return False + return True + + +def generate_extra_cases(analysis: Analysis, out: CWriter) -> None: + out.emit("#define EXTRA_CASES \\\n") + valid_opcodes = set(analysis.opmap.values()) + for op in range(256): + if op not in valid_opcodes: + out.emit(f" case {op}: \\\n") + out.emit(" ;\n") + + +def generate_pseudo_targets(analysis: Analysis, out: CWriter) -> None: + table_size = len(analysis.pseudos) + max_targets = max(len(pseudo.targets) for pseudo in analysis.pseudos.values()) + out.emit("struct pseudo_targets {\n") + out.emit(f"uint8_t targets[{max_targets + 1}];\n") + out.emit("};\n") + out.emit( + f"extern const struct pseudo_targets _PyOpcode_PseudoTargets[{table_size}];\n" + ) + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit( + f"const struct pseudo_targets _PyOpcode_PseudoTargets[{table_size}] = {{\n" + ) + for pseudo in analysis.pseudos.values(): + targets = ["0"] * (max_targets + 1) + for i, target in enumerate(pseudo.targets): + targets[i] = target.name + out.emit(f"[{pseudo.name}-256] = {{ {{ {', 
'.join(targets)} }} }},\n")
+    out.emit("};\n\n")
+    out.emit("#endif // NEED_OPCODE_METADATA\n")
+    out.emit("static inline bool\n")
+    out.emit("is_pseudo_target(int pseudo, int target) {\n")
+    out.emit(f"if (pseudo < 256 || pseudo >= {256+table_size}) {{\n")
+    out.emit(f"return false;\n")
+    out.emit("}\n")
+    out.emit(
+        f"for (int i = 0; _PyOpcode_PseudoTargets[pseudo-256].targets[i]; i++) {{\n"
+    )
+    out.emit(
+        f"if (_PyOpcode_PseudoTargets[pseudo-256].targets[i] == target) return true;\n"
+    )
+    out.emit("}\n")
+    out.emit(f"return false;\n")
+    out.emit("}\n\n")
+
+
+def generate_opcode_metadata(
+    filenames: list[str], analysis: Analysis, outfile: TextIO
+) -> None:
+    write_header(__file__, filenames, outfile)
+    out = CWriter(outfile, 0, False)
+    with out.header_guard("Py_CORE_OPCODE_METADATA_H"):
+        out.emit("#ifndef Py_BUILD_CORE\n")
+        out.emit('# error "this header requires Py_BUILD_CORE define"\n')
+        out.emit("#endif\n\n")
+        out.emit("#include <stdbool.h> // bool\n")
+        out.emit('#include "opcode_ids.h"\n')
+        generate_is_pseudo(analysis, out)
+        out.emit('#include "pycore_uop_ids.h"\n')
+        generate_stack_effect_functions(analysis, out)
+        generate_instruction_formats(analysis, out)
+        table_size = 256 + len(analysis.pseudos)
+        out.emit("#define IS_VALID_OPCODE(OP) \\\n")
+        out.emit(f"    (((OP) >= 0) && ((OP) < {table_size}) && \\\n")
+        out.emit("     (_PyOpcode_opcode_metadata[(OP)].valid_entry))\n\n")
+        generate_flag_macros(out)
+        generate_oparg_macros(out)
+        generate_metadata_table(analysis, out)
+        generate_expansion_table(analysis, out)
+        generate_name_table(analysis, out)
+        generate_cache_table(analysis, out)
+        generate_deopt_table(analysis, out)
+        generate_extra_cases(analysis, out)
+        generate_pseudo_targets(analysis, out)
+
+
+arg_parser = argparse.ArgumentParser(
+    description="Generate the header file with opcode metadata.",
+    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+)
+
+
+DEFAULT_OUTPUT = ROOT / "Include/internal/pycore_opcode_metadata.h"
+
+
+arg_parser.add_argument(
+    "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
+)
+
+arg_parser.add_argument(
+    "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)"
+)
+
+if __name__ == "__main__":
+    args = arg_parser.parse_args()
+    if len(args.input) == 0:
+        args.input.append(DEFAULT_INPUT)
+    data = analyze_files(args.input)
+    with open(args.output, "w") as outfile:
+        generate_opcode_metadata(args.input, data, outfile)
diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py
index fe4e8e476eadee..2b77d14d21143f 100644
--- a/Tools/cases_generator/parser.py
+++ b/Tools/cases_generator/parser.py
@@ -11,7 +11,17 @@
     OpName,
     AstNode,
 )
-from formatting import prettify_filename
+
+
+def prettify_filename(filename: str) -> str:
+    # Make filename more user-friendly and less platform-specific,
+    # it is only used for error reporting at this point.
+    filename = filename.replace("\\", "/")
+    if filename.startswith("./"):
+        filename = filename[2:]
+    if filename.endswith(".new"):
+        filename = filename[:-4]
+    return filename
 
 
 BEGIN_MARKER = "// BEGIN BYTECODES //"
diff --git a/Tools/cases_generator/py_metadata_generator.py b/Tools/cases_generator/py_metadata_generator.py
new file mode 100644
index 00000000000000..43811fdacc8a9e
--- /dev/null
+++ b/Tools/cases_generator/py_metadata_generator.py
@@ -0,0 +1,97 @@
+"""Generate opcode metadata for Python.
+Reads the instruction definitions from bytecodes.c.
+Writes the metadata to Lib/_opcode_metadata.py by default.
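A sketch of the _specializations table this script emits into Lib/_opcode_metadata.py (the family and member names below are examples):

```python
# Each specialization family maps the generic opcode name to the list
# of its specialized variants.
families = {"BINARY_OP": ["BINARY_OP_ADD_INT", "BINARY_OP_ADD_FLOAT"]}

lines = ["_specializations = {"]
for family, members in families.items():
    lines.append(f'    "{family}": [')
    lines.extend(f'        "{m}",' for m in members)
    lines.append("    ],")
lines.append("}")
print("\n".join(lines))
```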
+""" + +import argparse + +from analyzer import ( + Analysis, + analyze_files, +) +from generators_common import ( + DEFAULT_INPUT, + ROOT, + root_relative_path, + write_header, +) +from cwriter import CWriter +from typing import TextIO + + + +DEFAULT_OUTPUT = ROOT / "Lib/_opcode_metadata.py" + + +def get_specialized(analysis: Analysis) -> set[str]: + specialized: set[str] = set() + for family in analysis.families.values(): + for member in family.members: + specialized.add(member.name) + return specialized + + +def generate_specializations(analysis: Analysis, out: CWriter) -> None: + out.emit("_specializations = {\n") + for family in analysis.families.values(): + out.emit(f'"{family.name}": [\n') + for member in family.members: + out.emit(f' "{member.name}",\n') + out.emit("],\n") + out.emit("}\n\n") + + +def generate_specialized_opmap(analysis: Analysis, out: CWriter) -> None: + out.emit("_specialized_opmap = {\n") + names = [] + for family in analysis.families.values(): + for member in family.members: + if member.name == family.name: + continue + names.append(member.name) + for name in sorted(names): + out.emit(f"'{name}': {analysis.opmap[name]},\n") + out.emit("}\n\n") + + +def generate_opmap(analysis: Analysis, out: CWriter) -> None: + specialized = get_specialized(analysis) + out.emit("opmap = {\n") + for inst, op in analysis.opmap.items(): + if inst not in specialized: + out.emit(f"'{inst}': {analysis.opmap[inst]},\n") + out.emit("}\n\n") + + +def generate_py_metadata( + filenames: list[str], analysis: Analysis, outfile: TextIO +) -> None: + write_header(__file__, filenames, outfile, "#") + out = CWriter(outfile, 0, False) + generate_specializations(analysis, out) + generate_specialized_opmap(analysis, out) + generate_opmap(analysis, out) + out.emit(f"HAVE_ARGUMENT = {analysis.have_arg}\n") + out.emit(f"MIN_INSTRUMENTED_OPCODE = {analysis.min_instrumented}\n") + + +arg_parser = argparse.ArgumentParser( + description="Generate the Python file with opcode metadata.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, +) + +arg_parser.add_argument( + "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT +) + +arg_parser.add_argument( + "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" +) + +if __name__ == "__main__": + args = arg_parser.parse_args() + if len(args.input) == 0: + args.input.append(DEFAULT_INPUT) + data = analyze_files(args.input) + with open(args.output, "w") as outfile: + generate_py_metadata(args.input, data, outfile) diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 0b31ce4090f552..d351037a663ca2 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -1,10 +1,24 @@ -import sys -from analyzer import StackItem +import re +from analyzer import StackItem, Instruction, Uop from dataclasses import dataclass -from formatting import maybe_parenthesize from cwriter import CWriter +def maybe_parenthesize(sym: str) -> str: + """Add parentheses around a string if it contains an operator + and is not already parenthesized. + + An exception is made for '*' which is common and harmless + in the context where the symbolic size is used. 
+ """ + if sym.startswith("(") and sym.endswith(")"): + return sym + if re.match(r"^[\s\w*]+$", sym): + return sym + else: + return f"({sym})" + + def var_size(var: StackItem) -> str: if var.condition: # Special case simplification @@ -15,13 +29,16 @@ def var_size(var: StackItem) -> str: else: return var.size - +@dataclass class StackOffset: "The stack offset of the virtual base of the stack from the physical stack pointer" - def __init__(self) -> None: - self.popped: list[str] = [] - self.pushed: list[str] = [] + popped: list[str] + pushed: list[str] + + @staticmethod + def empty() -> "StackOffset": + return StackOffset([], []) def pop(self, item: StackItem) -> None: self.popped.append(var_size(item)) @@ -29,6 +46,15 @@ def pop(self, item: StackItem) -> None: def push(self, item: StackItem) -> None: self.pushed.append(var_size(item)) + def __sub__(self, other: "StackOffset") -> "StackOffset": + return StackOffset( + self.popped + other.pushed, + self.pushed + other.popped + ) + + def __neg__(self) -> "StackOffset": + return StackOffset(self.pushed, self.popped) + def simplify(self) -> None: "Remove matching values from both the popped and pushed list" if not self.popped or not self.pushed: @@ -88,9 +114,9 @@ class SizeMismatch(Exception): class Stack: def __init__(self) -> None: - self.top_offset = StackOffset() - self.base_offset = StackOffset() - self.peek_offset = StackOffset() + self.top_offset = StackOffset.empty() + self.base_offset = StackOffset.empty() + self.peek_offset = StackOffset.empty() self.variables: list[StackItem] = [] self.defined: set[str] = set() @@ -166,3 +192,15 @@ def flush(self, out: CWriter) -> None: def as_comment(self) -> str: return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. Top offset: {self.top_offset.to_c()} */" + + +def get_stack_effect(inst: Instruction) -> Stack: + stack = Stack() + for uop in inst.parts: + if not isinstance(uop, Uop): + continue + for var in reversed(uop.stack.inputs): + stack.pop(var) + for i, var in enumerate(uop.stack.outputs): + stack.push(var) + return stack diff --git a/Tools/cases_generator/stacking.py b/Tools/cases_generator/stacking.py deleted file mode 100644 index 123e38c524f49d..00000000000000 --- a/Tools/cases_generator/stacking.py +++ /dev/null @@ -1,534 +0,0 @@ -import dataclasses -import typing - -from flags import variable_used_unspecialized -from formatting import ( - Formatter, - UNUSED, - maybe_parenthesize, - parenthesize_cond, -) -from instructions import ( - ActiveCacheEffect, - Instruction, - MacroInstruction, - Component, - Tiers, - TIER_ONE, -) -from parsing import StackEffect, CacheEffect, Family - - -@dataclasses.dataclass -class StackOffset: - """Represent the stack offset for a PEEK or POKE. - - - At stack_pointer[0], deep and high are both empty. - (Note that that is an invalid stack reference.) - - Below stack top, only deep is non-empty. - - Above stack top, only high is non-empty. - - In complex cases, both deep and high may be non-empty. - - All this would be much simpler if all stack entries were the same - size, but with conditional and array effects, they aren't. - The offsets are each represented by a list of StackEffect objects. - The name in the StackEffects is unused. 
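A toy model of the deep/high bookkeeping this docstring describes: recording an effect on one side first cancels a matching entry on the other side, so matched pops and pushes net out:

```python
deep: list[str] = []  # effects below the stack top
high: list[str] = []  # effects above the stack top

def deeper(eff: str) -> None:
    if eff in high:
        high.remove(eff)  # cancel a pending push
    else:
        deep.append(eff)

def higher(eff: str) -> None:
    if eff in deep:
        deep.remove(eff)  # cancel a pending pop
    else:
        high.append(eff)

deeper("x")  # consume one input: one slot below stack top
higher("x")  # produce one output: cancels the pop
assert (deep, high) == ([], [])  # net stack effect is zero
```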
- """ - - deep: list[StackEffect] = dataclasses.field(default_factory=list) - high: list[StackEffect] = dataclasses.field(default_factory=list) - - def clone(self) -> "StackOffset": - return StackOffset(list(self.deep), list(self.high)) - - def negate(self) -> "StackOffset": - return StackOffset(list(self.high), list(self.deep)) - - def deeper(self, eff: StackEffect) -> None: - if eff in self.high: - self.high.remove(eff) - else: - self.deep.append(eff) - - def higher(self, eff: StackEffect) -> None: - if eff in self.deep: - self.deep.remove(eff) - else: - self.high.append(eff) - - def as_terms(self) -> list[tuple[str, str]]: - num = 0 - terms: list[tuple[str, str]] = [] - for eff in self.deep: - if eff.size: - terms.append(("-", maybe_parenthesize(eff.size))) - elif eff.cond and eff.cond not in ("0", "1"): - terms.append(("-", f"({parenthesize_cond(eff.cond)} ? 1 : 0)")) - elif eff.cond != "0": - num -= 1 - for eff in self.high: - if eff.size: - terms.append(("+", maybe_parenthesize(eff.size))) - elif eff.cond and eff.cond not in ("0", "1"): - terms.append(("+", f"({parenthesize_cond(eff.cond)} ? 1 : 0)")) - elif eff.cond != "0": - num += 1 - if num < 0: - terms.insert(0, ("-", str(-num))) - elif num > 0: - terms.append(("+", str(num))) - return terms - - def as_index(self) -> str: - terms = self.as_terms() - return make_index(terms) - - def equivalent_to(self, other: "StackOffset") -> bool: - if self.deep == other.deep and self.high == other.high: - return True - deep = list(self.deep) - for x in other.deep: - try: - deep.remove(x) - except ValueError: - return False - if deep: - return False - high = list(self.high) - for x in other.high: - try: - high.remove(x) - except ValueError: - return False - if high: - return False - return True - - -def make_index(terms: list[tuple[str, str]]) -> str: - # Produce an index expression from the terms honoring PEP 8, - # surrounding binary ops with spaces but not unary minus - index = "" - for sign, term in terms: - if index: - index += f" {sign} {term}" - elif sign == "+": - index = term - else: - index = sign + term - return index or "0" - - -@dataclasses.dataclass -class StackItem: - offset: StackOffset - effect: StackEffect - - def as_variable(self, lax: bool = False) -> str: - """Return e.g. stack_pointer[-1].""" - terms = self.offset.as_terms() - if self.effect.size: - terms.insert(0, ("+", "stack_pointer")) - index = make_index(terms) - if self.effect.size: - res = index - else: - res = f"stack_pointer[{index}]" - if not lax: - # Check that we're not reading or writing above stack top. - # Skip this for output variable initialization (lax=True). 
- assert ( - self.effect in self.offset.deep and not self.offset.high - ), f"Push or pop above current stack level: {res}" - return res - - def as_stack_effect(self, lax: bool = False) -> StackEffect: - return StackEffect( - self.as_variable(lax=lax), - self.effect.type if self.effect.size else "", - self.effect.cond, - self.effect.size, - ) - - -@dataclasses.dataclass -class CopyItem: - src: StackItem - dst: StackItem - - -class EffectManager: - """Manage stack effects and offsets for an instruction.""" - - instr: Instruction - active_caches: list[ActiveCacheEffect] - peeks: list[StackItem] - pokes: list[StackItem] - copies: list[CopyItem] # See merge() - # Track offsets from stack pointer - min_offset: StackOffset - final_offset: StackOffset - # Link to previous manager - pred: "EffectManager | None" = None - - def __init__( - self, - instr: Instruction, - active_caches: list[ActiveCacheEffect], - pred: "EffectManager | None" = None, - ): - self.instr = instr - self.active_caches = active_caches - self.peeks = [] - self.pokes = [] - self.copies = [] - self.final_offset = pred.final_offset.clone() if pred else StackOffset() - for eff in reversed(instr.input_effects): - self.final_offset.deeper(eff) - self.peeks.append(StackItem(offset=self.final_offset.clone(), effect=eff)) - self.min_offset = self.final_offset.clone() - for eff in instr.output_effects: - self.pokes.append(StackItem(offset=self.final_offset.clone(), effect=eff)) - self.final_offset.higher(eff) - - self.pred = pred - while pred: - # Replace push(x) + pop(y) with copy(x, y). - # Check that the sources and destinations are disjoint. - sources: set[str] = set() - destinations: set[str] = set() - while ( - pred.pokes - and self.peeks - and pred.pokes[-1].effect == self.peeks[0].effect - ): - src = pred.pokes.pop(-1) - dst = self.peeks.pop(0) - assert src.offset.equivalent_to(dst.offset), (src, dst) - pred.final_offset.deeper(src.effect) - if dst.effect.name != src.effect.name: - if dst.effect.name != UNUSED: - destinations.add(dst.effect.name) - if src.effect.name != UNUSED: - sources.add(src.effect.name) - self.copies.append(CopyItem(src, dst)) - # TODO: Turn this into an error (pass an Analyzer instance?) - assert sources & destinations == set(), ( - pred.instr.name, - self.instr.name, - sources, - destinations, - ) - # See if we can get more copies of a earlier predecessor. - if self.peeks and not pred.pokes and not pred.peeks: - pred = pred.pred - else: - pred = None # Break - - # Fix up patterns of copies through UNUSED, - # e.g. cp(a, UNUSED) + cp(UNUSED, b) -> cp(a, b). 
- if any(copy.src.effect.name == UNUSED for copy in self.copies): - pred = self.pred - while pred is not None: - for copy in self.copies: - if copy.src.effect.name == UNUSED: - for pred_copy in pred.copies: - if pred_copy.dst == copy.src: - copy.src = pred_copy.src - break - pred = pred.pred - - def adjust_deeper(self, eff: StackEffect) -> None: - for peek in self.peeks: - peek.offset.deeper(eff) - for poke in self.pokes: - poke.offset.deeper(eff) - for copy in self.copies: - copy.src.offset.deeper(eff) - copy.dst.offset.deeper(eff) - self.min_offset.deeper(eff) - self.final_offset.deeper(eff) - - def adjust_higher(self, eff: StackEffect) -> None: - for peek in self.peeks: - peek.offset.higher(eff) - for poke in self.pokes: - poke.offset.higher(eff) - for copy in self.copies: - copy.src.offset.higher(eff) - copy.dst.offset.higher(eff) - self.min_offset.higher(eff) - self.final_offset.higher(eff) - - def adjust(self, offset: StackOffset) -> None: - deep = list(offset.deep) - high = list(offset.high) - for down in deep: - self.adjust_deeper(down) - for up in high: - self.adjust_higher(up) - - def adjust_inverse(self, offset: StackOffset) -> None: - deep = list(offset.deep) - high = list(offset.high) - for down in deep: - self.adjust_higher(down) - for up in high: - self.adjust_deeper(up) - - def collect_vars(self) -> dict[str, StackEffect]: - """Collect all variables, skipping unused ones.""" - vars: dict[str, StackEffect] = {} - - def add(eff: StackEffect) -> None: - if eff.name != UNUSED: - if eff.name in vars: - # TODO: Make this an error - assert vars[eff.name] == eff, ( - self.instr.name, - eff.name, - vars[eff.name], - eff, - ) - else: - vars[eff.name] = eff - - for copy in self.copies: - add(copy.src.effect) - add(copy.dst.effect) - for peek in self.peeks: - add(peek.effect) - for poke in self.pokes: - add(poke.effect) - - return vars - - -def less_than(a: StackOffset, b: StackOffset) -> bool: - # TODO: Handle more cases - if a.high != b.high: - return False - return a.deep[: len(b.deep)] == b.deep - - -def get_managers(parts: list[Component]) -> list[EffectManager]: - managers: list[EffectManager] = [] - pred: EffectManager | None = None - for part in parts: - mgr = EffectManager(part.instr, part.active_caches, pred) - managers.append(mgr) - pred = mgr - return managers - - -def get_stack_effect_info_for_macro(mac: MacroInstruction) -> tuple[str, str]: - """Get the stack effect info for a macro instruction. - - Returns a tuple (popped, pushed) where each is a string giving a - symbolic expression for the number of values popped/pushed. 
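The popped/pushed computation for a macro can be worked through with plain integers instead of symbolic StackOffsets (the per-component effects here are invented):

```python
# (pops, pushes) for each component uop, in execution order.
parts = [(-2, +1), (-1, +1)]

depth = 0
min_depth = 0
for pops, pushes in parts:
    depth += pops
    min_depth = min(min_depth, depth)  # deepest excursion below the start
    depth += pushes

popped = -min_depth          # values consumed from the caller's stack
pushed = depth - min_depth   # values left above that deepest point
assert (popped, pushed) == (2, 1)  # net effect: pop 2, push 1
```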
- """ - parts = [part for part in mac.parts if isinstance(part, Component)] - managers = get_managers(parts) - popped = StackOffset() - for mgr in managers: - if less_than(mgr.min_offset, popped): - popped = mgr.min_offset.clone() - # Compute pushed = final - popped - pushed = managers[-1].final_offset.clone() - for effect in popped.deep: - pushed.higher(effect) - for effect in popped.high: - pushed.deeper(effect) - return popped.negate().as_index(), pushed.as_index() - - -def write_single_instr( - instr: Instruction, out: Formatter, tier: Tiers = TIER_ONE -) -> None: - try: - write_components( - [Component(instr, instr.active_caches)], - out, - tier, - 0, - instr.family, - ) - except AssertionError as err: - raise AssertionError(f"Error writing instruction {instr.name}") from err - - -def write_macro_instr(mac: MacroInstruction, out: Formatter) -> None: - parts = [ - part - for part in mac.parts - if isinstance(part, Component) and part.instr.name != "_SET_IP" - ] - out.emit("") - with out.block(f"TARGET({mac.name})"): - needs_this = any(part.instr.needs_this_instr for part in parts) - if needs_this and not mac.predicted: - out.emit(f"_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;") - else: - out.emit(f"frame->instr_ptr = next_instr;") - out.emit(f"next_instr += {mac.cache_offset+1};") - out.emit(f"INSTRUCTION_STATS({mac.name});") - if mac.predicted: - out.emit(f"PREDICTED({mac.name});") - if needs_this: - out.emit(f"_Py_CODEUNIT *this_instr = next_instr - {mac.cache_offset+1};") - out.static_assert_family_size(mac.name, mac.family, mac.cache_offset) - try: - next_instr_is_set = write_components( - parts, out, TIER_ONE, mac.cache_offset, mac.family - ) - except AssertionError as err: - raise AssertionError(f"Error writing macro {mac.name}") from err - if not parts[-1].instr.always_exits: - if parts[-1].instr.check_eval_breaker: - out.emit("CHECK_EVAL_BREAKER();") - out.emit("DISPATCH();") - - -def write_components( - parts: list[Component], - out: Formatter, - tier: Tiers, - cache_offset: int, - family: Family | None, -) -> bool: - managers = get_managers(parts) - - all_vars: dict[str, StackEffect] = {} - for mgr in managers: - for name, eff in mgr.collect_vars().items(): - if name in all_vars: - # TODO: Turn this into an error -- variable conflict - assert all_vars[name] == eff, ( - name, - mgr.instr.name, - all_vars[name], - eff, - ) - else: - all_vars[name] = eff - - # Declare all variables - for name, eff in all_vars.items(): - out.declare(eff, None) - - next_instr_is_set = False - for mgr in managers: - if len(parts) > 1: - out.emit(f"// {mgr.instr.name}") - - for copy in mgr.copies: - copy_src_effect = copy.src.effect - if copy_src_effect.name != copy.dst.effect.name: - if copy_src_effect.name == UNUSED: - copy_src_effect = copy.src.as_stack_effect() - out.assign(copy.dst.effect, copy_src_effect) - for peek in mgr.peeks: - out.assign( - peek.effect, - peek.as_stack_effect(), - ) - # Initialize array outputs - for poke in mgr.pokes: - if poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: - out.assign( - poke.effect, - poke.as_stack_effect(lax=True), - ) - - if mgr.instr.name in ("_PUSH_FRAME", "_POP_FRAME"): - # Adjust stack to min_offset. - # This means that all input effects of this instruction - # are materialized, but not its output effects. - # That's as intended, since these two are so special. - out.stack_adjust(mgr.min_offset.deep, mgr.min_offset.high) - # However, for tier 2, pretend the stack is at final offset. 
- mgr.adjust_inverse(mgr.final_offset) - if tier == TIER_ONE: - # TODO: Check in analyzer that _{PUSH,POP}_FRAME is last. - assert ( - mgr is managers[-1] - ), f"Expected {mgr.instr.name!r} to be the last uop" - assert_no_pokes(managers) - - if mgr.instr.name == "_SAVE_RETURN_OFFSET": - next_instr_is_set = True - if tier == TIER_ONE: - assert_no_pokes(managers) - - if len(parts) == 1: - mgr.instr.write_body(out, 0, mgr.active_caches, tier, family) - else: - with out.block(""): - mgr.instr.write_body(out, -4, mgr.active_caches, tier, family) - - if mgr is managers[-1] and not next_instr_is_set and not mgr.instr.always_exits: - # Adjust the stack to its final depth, *then* write the - # pokes for all preceding uops. - # Note that for array output effects we may still write - # past the stack top. - out.stack_adjust(mgr.final_offset.deep, mgr.final_offset.high) - write_all_pokes(mgr.final_offset, managers, out) - - return next_instr_is_set - - -def assert_no_pokes(managers: list[EffectManager]) -> None: - for mgr in managers: - for poke in mgr.pokes: - if not poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: - assert ( - poke.effect.name == UNUSED - ), f"Unexpected poke of {poke.effect.name} in {mgr.instr.name!r}" - - -def write_all_pokes( - offset: StackOffset, managers: list[EffectManager], out: Formatter -) -> None: - # Emit all remaining pushes (pokes) - for m in managers: - m.adjust_inverse(offset) - write_pokes(m, out) - - -def write_pokes(mgr: EffectManager, out: Formatter) -> None: - for poke in mgr.pokes: - if not poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: - out.assign( - poke.as_stack_effect(), - poke.effect, - ) - - -def write_single_instr_for_abstract_interp(instr: Instruction, out: Formatter) -> None: - try: - _write_components_for_abstract_interp( - [Component(instr, instr.active_caches)], - out, - ) - except AssertionError as err: - raise AssertionError( - f"Error writing abstract instruction {instr.name}" - ) from err - - -def _write_components_for_abstract_interp( - parts: list[Component], - out: Formatter, -) -> None: - managers = get_managers(parts) - for mgr in managers: - if mgr is managers[-1]: - out.stack_adjust(mgr.final_offset.deep, mgr.final_offset.high) - mgr.adjust_inverse(mgr.final_offset) - # NULL out the output stack effects - for poke in mgr.pokes: - if not poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: - out.emit( - f"PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)" - f"PARTITIONNODE_NULLROOT, PEEK(-({poke.offset.as_index()})), true);" - ) diff --git a/Tools/cases_generator/target_generator.py b/Tools/cases_generator/target_generator.py new file mode 100644 index 00000000000000..44a699c92bbd22 --- /dev/null +++ b/Tools/cases_generator/target_generator.py @@ -0,0 +1,54 @@ +"""Generate targets for computed goto dispatch +Reads the instruction definitions from bytecodes.c. +Writes the table to opcode_targets.h by default. 
+""" + +import argparse + +from analyzer import ( + Analysis, + analyze_files, +) +from generators_common import ( + DEFAULT_INPUT, + ROOT, +) +from cwriter import CWriter +from typing import TextIO + + +DEFAULT_OUTPUT = ROOT / "Python/opcode_targets.h" + + +def write_opcode_targets(analysis: Analysis, out: CWriter) -> None: + """Write header file that defines the jump target table""" + targets = ["&&_unknown_opcode,\n"] * 256 + for name, op in analysis.opmap.items(): + if op < 256: + targets[op] = f"&&TARGET_{name},\n" + out.emit("static void *opcode_targets[256] = {\n") + for target in targets: + out.emit(target) + out.emit("};\n") + +arg_parser = argparse.ArgumentParser( + description="Generate the file with dispatch targets.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, +) + +arg_parser.add_argument( + "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT +) + +arg_parser.add_argument( + "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" +) + +if __name__ == "__main__": + args = arg_parser.parse_args() + if len(args.input) == 0: + args.input.append(DEFAULT_INPUT) + data = analyze_files(args.input) + with open(args.output, "w") as outfile: + out = CWriter(outfile, 0, False) + write_opcode_targets(data, out) diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index 49cede978d821a..aba36ec74e5766 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -190,6 +190,7 @@ def generate_tier1_from_files( with open(outfilename, "w") as outfile: generate_tier1(filenames, data, outfile, lines) + if __name__ == "__main__": args = arg_parser.parse_args() if len(args.input) == 0: diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index a22fb6dd932503..7897b89b2752a7 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -103,13 +103,6 @@ def tier2_replace_deopt( TIER2_REPLACEMENT_FUNCTIONS["DEOPT_IF"] = tier2_replace_deopt -def is_super(uop: Uop) -> bool: - for tkn in uop.body: - if tkn.kind == "IDENTIFIER" and tkn.text == "oparg1": - return True - return False - - def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None: try: out.start_line() @@ -123,7 +116,7 @@ def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None: for cache in uop.caches: if cache.name != "unused": if cache.size == 4: - type = cast ="PyObject *" + type = cast = "PyObject *" else: type = f"uint{cache.size*16}_t " cast = f"uint{cache.size*16}_t" @@ -156,7 +149,7 @@ def generate_tier2( for name, uop in analysis.uops.items(): if uop.properties.tier_one_only: continue - if is_super(uop): + if uop.is_super(): continue if not uop.is_viable(): out.emit(f"/* {uop.name} is not a viable micro-op for tier 2 */\n\n") diff --git a/Tools/cases_generator/uop_id_generator.py b/Tools/cases_generator/uop_id_generator.py index 277da25835f6fb..633249f1c6b1fe 100644 --- a/Tools/cases_generator/uop_id_generator.py +++ b/Tools/cases_generator/uop_id_generator.py @@ -24,50 +24,32 @@ DEFAULT_OUTPUT = ROOT / "Include/internal/pycore_uop_ids.h" -OMIT = {"_CACHE", "_RESERVED", "_EXTENDED_ARG"} - - def generate_uop_ids( filenames: list[str], analysis: Analysis, outfile: TextIO, distinct_namespace: bool ) -> None: write_header(__file__, filenames, outfile) out = CWriter(outfile, 0, False) - out.emit( - """#ifndef Py_CORE_UOP_IDS_H -#define Py_CORE_UOP_IDS_H -#ifdef __cplusplus -extern "C" { -#endif - -""" - ) - - 
next_id = 1 if distinct_namespace else 300
-    # These two are first by convention
-    out.emit(f"#define _EXIT_TRACE {next_id}\n")
-    next_id += 1
-    out.emit(f"#define _SET_IP {next_id}\n")
-    next_id += 1
-    PRE_DEFINED = {"_EXIT_TRACE", "_SET_IP"}
-
-    for uop in analysis.uops.values():
-        if uop.name in PRE_DEFINED:
-            continue
-        # TODO: We should omit all tier-1 only uops, but
-        # generate_cases.py still generates code for those.
-        if uop.name in OMIT:
-            continue
-        if uop.implicitly_created and not distinct_namespace:
-            out.emit(f"#define {uop.name} {uop.name[1:]}\n")
-        else:
-            out.emit(f"#define {uop.name} {next_id}\n")
-            next_id += 1
-
-    out.emit("\n")
-    out.emit("#ifdef __cplusplus\n")
-    out.emit("}\n")
-    out.emit("#endif\n")
-    out.emit("#endif /* !Py_OPCODE_IDS_H */\n")
+    with out.header_guard("Py_CORE_UOP_IDS_H"):
+        next_id = 1 if distinct_namespace else 300
+        # These two are first by convention
+        out.emit(f"#define _EXIT_TRACE {next_id}\n")
+        next_id += 1
+        out.emit(f"#define _SET_IP {next_id}\n")
+        next_id += 1
+        PRE_DEFINED = {"_EXIT_TRACE", "_SET_IP"}
+
+        for uop in analysis.uops.values():
+            if uop.name in PRE_DEFINED:
+                continue
+            if uop.properties.tier_one_only:
+                continue
+            if uop.implicitly_created and not distinct_namespace:
+                out.emit(f"#define {uop.name} {uop.name[1:]}\n")
+            else:
+                out.emit(f"#define {uop.name} {next_id}\n")
+                next_id += 1
+
+        out.emit(f"#define MAX_UOP_ID {next_id-1}\n")
 
 arg_parser = argparse.ArgumentParser(
diff --git a/Tools/cases_generator/uop_metadata_generator.py b/Tools/cases_generator/uop_metadata_generator.py
new file mode 100644
index 00000000000000..d4f3a096d2acc1
--- /dev/null
+++ b/Tools/cases_generator/uop_metadata_generator.py
@@ -0,0 +1,73 @@
+"""Generate uop metadata.
+Reads the instruction definitions from bytecodes.c.
+Writes the metadata to pycore_uop_metadata.h by default.
+""" + +import argparse + +from analyzer import ( + Analysis, + analyze_files, +) +from generators_common import ( + DEFAULT_INPUT, + ROOT, + write_header, + cflags, +) +from cwriter import CWriter +from typing import TextIO + + +DEFAULT_OUTPUT = ROOT / "Include/internal/pycore_uop_metadata.h" + + +def generate_names_and_flags(analysis: Analysis, out: CWriter) -> None: + out.emit("extern const uint16_t _PyUop_Flags[MAX_UOP_ID+1];\n") + out.emit("extern const char * const _PyOpcode_uop_name[MAX_UOP_ID+1];\n\n") + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit("const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {\n") + for uop in analysis.uops.values(): + if uop.is_viable() and not uop.properties.tier_one_only: + out.emit(f"[{uop.name}] = {cflags(uop.properties)},\n") + + out.emit("};\n\n") + out.emit("const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {\n") + for uop in sorted(analysis.uops.values(), key=lambda t: t.name): + if uop.is_viable() and not uop.properties.tier_one_only: + out.emit(f'[{uop.name}] = "{uop.name}",\n') + out.emit("};\n") + out.emit("#endif // NEED_OPCODE_METADATA\n\n") + + +def generate_uop_metadata( + filenames: list[str], analysis: Analysis, outfile: TextIO +) -> None: + write_header(__file__, filenames, outfile) + out = CWriter(outfile, 0, False) + with out.header_guard("Py_CORE_UOP_METADATA_H"): + out.emit("#include \n") + out.emit('#include "pycore_uop_ids.h"\n') + generate_names_and_flags(analysis, out) + + +arg_parser = argparse.ArgumentParser( + description="Generate the header file with uop metadata.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, +) + +arg_parser.add_argument( + "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT +) + +arg_parser.add_argument( + "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)" +) + +if __name__ == "__main__": + args = arg_parser.parse_args() + if len(args.input) == 0: + args.input.append(DEFAULT_INPUT) + data = analyze_files(args.input) + with open(args.output, "w") as outfile: + generate_uop_metadata(args.input, data, outfile) diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 87feef1b82ca39..f004bec3cce8f6 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -50,6 +50,11 @@ overload, ) + +# Local imports. +import libclinic + + # TODO: # # soon: @@ -61,25 +66,6 @@ # and keyword-only # -version = '1' - -NO_VARARG = "PY_SSIZE_T_MAX" -CLINIC_PREFIX = "__clinic_" -CLINIC_PREFIXED_ARGS = { - "_keywords", - "_parser", - "args", - "argsbuf", - "fastargs", - "kwargs", - "kwnames", - "nargs", - "noptargs", - "return_value", -} - -# '#include "header.h" // reason': column of '//' comment -INCLUDE_COMMENT_COLUMN = 35 # match '#define Py_LIMITED_API' LIMITED_CAPI_REGEX = re.compile(r'#define +Py_LIMITED_API') @@ -105,44 +91,8 @@ def __repr__(self) -> str: NULL = Null() -sig_end_marker = '--' - -Appender = Callable[[str], None] -Outputter = Callable[[], str] TemplateDict = dict[str, str] -class _TextAccumulator(NamedTuple): - text: list[str] - append: Appender - output: Outputter - -def _text_accumulator() -> _TextAccumulator: - text: list[str] = [] - def output() -> str: - s = ''.join(text) - text.clear() - return s - return _TextAccumulator(text, text.append, output) - - -class TextAccumulator(NamedTuple): - append: Appender - output: Outputter - -def text_accumulator() -> TextAccumulator: - """ - Creates a simple text accumulator / joiner. - - Returns a pair of callables: - append, output - "append" appends a string to the accumulator. 
- "output" returns the contents of the accumulator - joined together (''.join(accumulator)) and - empties the accumulator. - """ - text, append, output = _text_accumulator() - return TextAccumulator(append, output) - @dc.dataclass class ClinicError(Exception): @@ -215,33 +165,6 @@ def fail( warn_or_fail(*args, filename=filename, line_number=line_number, fail=True) -def quoted_for_c_string(s: str) -> str: - for old, new in ( - ('\\', '\\\\'), # must be first! - ('"', '\\"'), - ("'", "\\'"), - ): - s = s.replace(old, new) - return s - -def c_repr(s: str) -> str: - return '"' + s + '"' - - -def wrapped_c_string_literal( - text: str, - *, - width: int = 72, - suffix: str = '', - initial_indent: int = 0, - subsequent_indent: int = 4 -) -> str: - wrapped = textwrap.wrap(text, width=width, replace_whitespace=False, - drop_whitespace=False, break_on_hyphens=False) - separator = '"' + suffix + '\n' + subsequent_indent * ' ' + '"' - return initial_indent * ' ' + '"' + separator.join(wrapped) + '"' - - is_legal_c_identifier = re.compile('^[A-Za-z_][A-Za-z0-9_]*$').match def is_legal_py_identifier(s: str) -> bool: @@ -266,20 +189,6 @@ def ensure_legal_c_identifier(s: str) -> str: return s + "_value" return s -def rstrip_lines(s: str) -> str: - text, add, output = _text_accumulator() - for line in s.split('\n'): - add(line.rstrip()) - add('\n') - text.pop() - return output() - -def format_escape(s: str) -> str: - # double up curly-braces, this string will be used - # as part of a format_map() template later - s = s.replace('{', '{{') - s = s.replace('}', '}}') - return s def linear_format(s: str, **kwargs: str) -> str: """ @@ -295,19 +204,16 @@ def linear_format(s: str, **kwargs: str) -> str: by the indent of the source line. * A newline will be added to the end. """ - - add, output = text_accumulator() + lines = [] for line in s.split('\n'): indent, curly, trailing = line.partition('{') if not curly: - add(line) - add('\n') + lines.extend([line, "\n"]) continue name, curly, trailing = trailing.partition('}') if not curly or name not in kwargs: - add(line) - add('\n') + lines.extend([line, "\n"]) continue if trailing: @@ -321,101 +227,11 @@ def linear_format(s: str, **kwargs: str) -> str: if not value: continue - value = textwrap.indent(rstrip_lines(value), indent) - add(value) - add('\n') - - return output()[:-1] + stripped = [line.rstrip() for line in value.split("\n")] + value = textwrap.indent("\n".join(stripped), indent) + lines.extend([value, "\n"]) -def indent_all_lines(s: str, prefix: str) -> str: - """ - Returns 's', with 'prefix' prepended to all lines. - - If the last line is empty, prefix is not prepended - to it. (If s is blank, returns s unchanged.) - - (textwrap.indent only adds to non-blank lines.) - """ - split = s.split('\n') - last = split.pop() - final = [] - for line in split: - final.append(prefix) - final.append(line) - final.append('\n') - if last: - final.append(prefix) - final.append(last) - return ''.join(final) - -def suffix_all_lines(s: str, suffix: str) -> str: - """ - Returns 's', with 'suffix' appended to all lines. - - If the last line is empty, suffix is not appended - to it. (If s is blank, returns s unchanged.) 
- """ - split = s.split('\n') - last = split.pop() - final = [] - for line in split: - final.append(line) - final.append(suffix) - final.append('\n') - if last: - final.append(last) - final.append(suffix) - return ''.join(final) - - -def pprint_words(items: list[str]) -> str: - if len(items) <= 2: - return " and ".join(items) - else: - return ", ".join(items[:-1]) + " and " + items[-1] - - -def version_splitter(s: str) -> tuple[int, ...]: - """Splits a version string into a tuple of integers. - - The following ASCII characters are allowed, and employ - the following conversions: - a -> -3 - b -> -2 - c -> -1 - (This permits Python-style version strings such as "1.4b3".) - """ - version: list[int] = [] - accumulator: list[str] = [] - def flush() -> None: - if not accumulator: - fail(f'Unsupported version string: {s!r}') - version.append(int(''.join(accumulator))) - accumulator.clear() - - for c in s: - if c.isdigit(): - accumulator.append(c) - elif c == '.': - flush() - elif c in 'abc': - flush() - version.append('abc'.index(c) - 3) - else: - fail(f'Illegal character {c!r} in version string {s!r}') - flush() - return tuple(version) - -def version_comparator(version1: str, version2: str) -> Literal[-1, 0, 1]: - iterator = itertools.zip_longest( - version_splitter(version1), version_splitter(version2), fillvalue=0 - ) - for a, b in iterator: - if a < b: - return -1 - if a > b: - return 1 - return 0 + return "".join(lines[:-1]) class CRenderData: @@ -653,34 +469,6 @@ def permute_optional_groups( return tuple(accumulator) -def strip_leading_and_trailing_blank_lines(s: str) -> str: - lines = s.rstrip().split('\n') - while lines: - line = lines[0] - if line.strip(): - break - del lines[0] - return '\n'.join(lines) - -@functools.lru_cache() -def normalize_snippet( - s: str, - *, - indent: int = 0 -) -> str: - """ - Reformats s: - * removes leading and trailing blank lines - * ensures that it does not end with a newline - * dedents so the first nonwhite character on any line is at column "indent" - """ - s = strip_leading_and_trailing_blank_lines(s) - s = textwrap.dedent(s) - if indent: - s = textwrap.indent(s, ' ' * indent) - return s - - def declare_parser( f: Function, *, @@ -751,62 +539,7 @@ def declare_parser( }}; #undef KWTUPLE """ % (format_ or fname) - return normalize_snippet(declarations) - - -def wrap_declarations( - text: str, - length: int = 78 -) -> str: - """ - A simple-minded text wrapper for C function declarations. - - It views a declaration line as looking like this: - xxxxxxxx(xxxxxxxxx,xxxxxxxxx) - If called with length=30, it would wrap that line into - xxxxxxxx(xxxxxxxxx, - xxxxxxxxx) - (If the declaration has zero or one parameters, this - function won't wrap it.) - - If this doesn't work properly, it's probably better to - start from scratch with a more sophisticated algorithm, - rather than try and improve/debug this dumb little function. 
- """ - lines = [] - for line in text.split('\n'): - prefix, _, after_l_paren = line.partition('(') - if not after_l_paren: - lines.append(line) - continue - in_paren, _, after_r_paren = after_l_paren.partition(')') - if not _: - lines.append(line) - continue - if ',' not in in_paren: - lines.append(line) - continue - parameters = [x.strip() + ", " for x in in_paren.split(',')] - prefix += "(" - if len(prefix) < length: - spaces = " " * len(prefix) - else: - spaces = " " * 4 - - while parameters: - line = prefix - first = True - while parameters: - if (not first and - (len(line) + len(parameters[0]) > length)): - break - line += parameters.pop(0) - first = False - if not parameters: - line = line.rstrip(", ") + ")" + after_r_paren - lines.append(line.rstrip()) - prefix = spaces - return "\n".join(lines) + return libclinic.normalize_snippet(declarations) class CLanguage(Language): @@ -818,78 +551,95 @@ class CLanguage(Language): stop_line = "[{dsl_name} start generated code]*/" checksum_line = "/*[{dsl_name} end generated code: {arguments}]*/" - PARSER_PROTOTYPE_KEYWORD: Final[str] = normalize_snippet(""" + NO_VARARG: Final[str] = "PY_SSIZE_T_MAX" + + PARSER_PROTOTYPE_KEYWORD: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) """) - PARSER_PROTOTYPE_KEYWORD___INIT__: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_KEYWORD___INIT__: Final[str] = libclinic.normalize_snippet(""" static int {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) """) - PARSER_PROTOTYPE_VARARGS: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_VARARGS: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *args) """) - PARSER_PROTOTYPE_FASTCALL: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_FASTCALL: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs) """) - PARSER_PROTOTYPE_FASTCALL_KEYWORDS: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_FASTCALL_KEYWORDS: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) - PARSER_PROTOTYPE_DEF_CLASS: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_DEF_CLASS: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) - PARSER_PROTOTYPE_NOARGS: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_NOARGS: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *Py_UNUSED(ignored)) """) - PARSER_PROTOTYPE_GETTER: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_GETTER: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, void *Py_UNUSED(context)) """) - PARSER_PROTOTYPE_SETTER: Final[str] = normalize_snippet(""" + PARSER_PROTOTYPE_SETTER: Final[str] = libclinic.normalize_snippet(""" static int {c_basename}({self_type}{self_name}, PyObject *value, void *Py_UNUSED(context)) """) - METH_O_PROTOTYPE: Final[str] = normalize_snippet(""" + METH_O_PROTOTYPE: Final[str] = libclinic.normalize_snippet(""" static PyObject * {c_basename}({impl_parameters}) """) - DOCSTRING_PROTOTYPE_VAR: Final[str] = normalize_snippet(""" + DOCSTRING_PROTOTYPE_VAR: Final[str] = 
libclinic.normalize_snippet(""" PyDoc_VAR({c_basename}__doc__); """) - DOCSTRING_PROTOTYPE_STRVAR: Final[str] = normalize_snippet(""" + DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet(""" PyDoc_STRVAR({c_basename}__doc__, {docstring}); """) - IMPL_DEFINITION_PROTOTYPE: Final[str] = normalize_snippet(""" + GETSET_DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet(""" + PyDoc_STRVAR({getset_basename}__doc__, + {docstring}); + #define {getset_basename}_HAS_DOCSTR + """) + IMPL_DEFINITION_PROTOTYPE: Final[str] = libclinic.normalize_snippet(""" static {impl_return_type} {c_basename}_impl({impl_parameters}) """) - METHODDEF_PROTOTYPE_DEFINE: Final[str] = normalize_snippet(r""" + METHODDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r""" #define {methoddef_name} \ {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}}, """) - GETTERDEF_PROTOTYPE_DEFINE: Final[str] = normalize_snippet(r""" + GETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r""" + #if defined({getset_basename}_HAS_DOCSTR) + # define {getset_basename}_DOCSTR {getset_basename}__doc__ + #else + # define {getset_basename}_DOCSTR NULL + #endif #if defined({getset_name}_GETSETDEF) # undef {getset_name}_GETSETDEF - # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, NULL}}, + # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}}, #else - # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, NULL, NULL}}, + # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, NULL, {getset_basename}_DOCSTR}}, #endif """) - SETTERDEF_PROTOTYPE_DEFINE: Final[str] = normalize_snippet(r""" + SETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r""" + #if defined({getset_name}_HAS_DOCSTR) + # define {getset_basename}_DOCSTR {getset_basename}__doc__ + #else + # define {getset_basename}_DOCSTR NULL + #endif #if defined({getset_name}_GETSETDEF) # undef {getset_name}_GETSETDEF - # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, NULL}}, + # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}}, #else # define {getset_name}_GETSETDEF {{"{name}", NULL, (setter){getset_basename}_set, NULL}}, #endif """) - METHODDEF_PROTOTYPE_IFNDEF: Final[str] = normalize_snippet(""" + METHODDEF_PROTOTYPE_IFNDEF: Final[str] = libclinic.normalize_snippet(""" #ifndef {methoddef_name} #define {methoddef_name} #endif /* !defined({methoddef_name}) */ @@ -956,9 +706,9 @@ def compiler_deprecated_warning( code = self.COMPILER_DEPRECATION_WARNING_PROTOTYPE.format( major=minversion[0], minor=minversion[1], - message=c_repr(message), + message=libclinic.c_repr(message), ) - return normalize_snippet(code) + return libclinic.normalize_snippet(code) def deprecate_positional_use( self, @@ -987,7 +737,7 @@ def deprecate_positional_use( params.values(), key=attrgetter("deprecated_positional") ): names = [repr(p.name) for p in group] - pstr = pprint_words(names) + pstr = libclinic.pprint_words(names) if len(names) == 1: message += ( f" Parameter {pstr} will become a keyword-only parameter " @@ -1006,10 +756,10 @@ def deprecate_positional_use( code = self.DEPRECATION_WARNING_PROTOTYPE.format( condition=condition, errcheck="", - 
message=wrapped_c_string_literal(message, width=64, - subsequent_indent=20), + message=libclinic.wrapped_c_string_literal(message, width=64, + subsequent_indent=20), ) - return normalize_snippet(code, indent=4) + return libclinic.normalize_snippet(code, indent=4) def deprecate_keyword_use( self, @@ -1056,7 +806,7 @@ def deprecate_keyword_use( else: condition = f"kwargs && PyDict_GET_SIZE(kwargs) && {condition}" names = [repr(p.name) for p in params.values()] - pstr = pprint_words(names) + pstr = libclinic.pprint_words(names) pl = 's' if len(params) != 1 else '' message = ( f"Passing keyword argument{pl} {pstr} to " @@ -1067,7 +817,7 @@ def deprecate_keyword_use( params.values(), key=attrgetter("deprecated_keyword") ): names = [repr(p.name) for p in group] - pstr = pprint_words(names) + pstr = libclinic.pprint_words(names) pl = 's' if len(names) != 1 else '' message += ( f" Parameter{pl} {pstr} will become positional-only " @@ -1089,30 +839,10 @@ def deprecate_keyword_use( code = self.DEPRECATION_WARNING_PROTOTYPE.format( condition=condition, errcheck=errcheck, - message=wrapped_c_string_literal(message, width=64, - subsequent_indent=20), + message=libclinic.wrapped_c_string_literal(message, width=64, + subsequent_indent=20), ) - return normalize_snippet(code, indent=4) - - def docstring_for_c_string( - self, - f: Function - ) -> str: - text, add, output = _text_accumulator() - # turn docstring into a properly quoted C string - for line in f.docstring.split('\n'): - add('"') - add(quoted_for_c_string(line)) - add('\\n"\n') - - if text[-2] == sig_end_marker: - # If we only have a signature, add the blank line that the - # __text_signature__ getter expects to be there. - add('"\\n"') - else: - text.pop() - add('"') - return ''.join(text) + return libclinic.normalize_snippet(code, indent=4) def output_templates( self, @@ -1142,7 +872,7 @@ def output_templates( and not f.critical_section) new_or_init = f.kind.new_or_init - vararg: int | str = NO_VARARG + vararg: int | str = self.NO_VARARG pos_only = min_pos = max_pos = min_kw_only = pseudo_args = 0 for i, p in enumerate(parameters, 1): if p.is_keyword_only(): @@ -1150,12 +880,12 @@ def output_templates( if not p.is_optional(): min_kw_only = i - max_pos elif p.is_vararg(): - if vararg != NO_VARARG: + if vararg != self.NO_VARARG: fail("Too many var args") pseudo_args += 1 vararg = i - 1 else: - if vararg == NO_VARARG: + if vararg == self.NO_VARARG: max_pos = i if p.is_positional_only(): pos_only = i @@ -1187,11 +917,17 @@ def output_templates( docstring_prototype = docstring_definition = '' elif f.kind is GETTER: methoddef_define = self.GETTERDEF_PROTOTYPE_DEFINE - docstring_prototype = docstring_definition = '' + if f.docstring: + docstring_prototype = '' + docstring_definition = self.GETSET_DOCSTRING_PROTOTYPE_STRVAR + else: + docstring_prototype = docstring_definition = '' elif f.kind is SETTER: + if f.docstring: + fail("docstrings are only supported for @getter, not @setter") return_value_declaration = "int {return_value};" methoddef_define = self.SETTERDEF_PROTOTYPE_DEFINE - docstring_prototype = docstring_prototype = docstring_definition = '' + docstring_prototype = docstring_definition = '' else: docstring_prototype = self.DOCSTRING_PROTOTYPE_VAR docstring_definition = self.DOCSTRING_PROTOTYPE_STRVAR @@ -1207,18 +943,18 @@ def parser_body( declarations: str = '' ) -> str: nonlocal parser_body_fields - add, output = text_accumulator() - add(prototype) + lines = [] + lines.append(prototype) parser_body_fields = fields - preamble = 
normalize_snippet(""" + preamble = libclinic.normalize_snippet(""" {{ {return_value_declaration} {parser_declarations} {declarations} {initializers} """) + "\n" - finale = normalize_snippet(""" + finale = libclinic.normalize_snippet(""" {modifications} {lock} {return_value} = {c_basename}_impl({impl_arguments}); @@ -1232,9 +968,8 @@ def parser_body( }} """) for field in preamble, *fields, finale: - add('\n') - add(field) - return linear_format(output(), parser_declarations=declarations) + lines.append(field) + return linear_format("\n".join(lines), parser_declarations=declarations) fastcall = not new_or_init limited_capi = clinic.limited_capi @@ -1271,7 +1006,7 @@ def parser_body( parser_prototype = self.PARSER_PROTOTYPE_DEF_CLASS return_error = ('return NULL;' if simple_return else 'goto exit;') - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (nargs) {{ PyErr_SetString(PyExc_TypeError, "{name}() takes no arguments"); %s @@ -1311,7 +1046,7 @@ def parser_body( argname = 'arg' if parameters[0].name == argname: argname += '_' - parser_prototype = normalize_snippet(""" + parser_prototype = libclinic.normalize_snippet(""" static PyObject * {c_basename}({self_type}{self_name}, PyObject *%s) """ % argname) @@ -1325,7 +1060,7 @@ def parser_body( }} """ % argname parser_definition = parser_body(parser_prototype, - normalize_snippet(parsearg, indent=4)) + libclinic.normalize_snippet(parsearg, indent=4)) elif has_option_groups: # positional parameters with option groups @@ -1359,15 +1094,16 @@ def parser_body( argname_fmt = 'PyTuple_GET_ITEM(args, %d)' left_args = f"{nargs} - {max_pos}" - max_args = NO_VARARG if (vararg != NO_VARARG) else max_pos + max_args = self.NO_VARARG if (vararg != self.NO_VARARG) else max_pos if limited_capi: parser_code = [] if nargs != 'nargs': - parser_code.append(normalize_snippet(f'Py_ssize_t nargs = {nargs};', indent=4)) + nargs_def = f'Py_ssize_t nargs = {nargs};' + parser_code.append(libclinic.normalize_snippet(nargs_def, indent=4)) nargs = 'nargs' if min_pos == max_args: pl = '' if min_pos == 1 else 's' - parser_code.append(normalize_snippet(f""" + parser_code.append(libclinic.normalize_snippet(f""" if ({nargs} != {min_pos}) {{{{ PyErr_Format(PyExc_TypeError, "{{name}} expected {min_pos} argument{pl}, got %zd", {nargs}); goto exit; @@ -1377,16 +1113,16 @@ def parser_body( else: if min_pos: pl = '' if min_pos == 1 else 's' - parser_code.append(normalize_snippet(f""" + parser_code.append(libclinic.normalize_snippet(f""" if ({nargs} < {min_pos}) {{{{ PyErr_Format(PyExc_TypeError, "{{name}} expected at least {min_pos} argument{pl}, got %zd", {nargs}); goto exit; }}}} """, indent=4)) - if max_args != NO_VARARG: + if max_args != self.NO_VARARG: pl = '' if max_args == 1 else 's' - parser_code.append(normalize_snippet(f""" + parser_code.append(libclinic.normalize_snippet(f""" if ({nargs} > {max_args}) {{{{ PyErr_Format(PyExc_TypeError, "{{name}} expected at most {max_args} argument{pl}, got %zd", {nargs}); goto exit; @@ -1396,7 +1132,7 @@ def parser_body( else: clinic.add_include('pycore_modsupport.h', '_PyArg_CheckPositional()') - parser_code = [normalize_snippet(f""" + parser_code = [libclinic.normalize_snippet(f""" if (!_PyArg_CheckPositional("{{name}}", {nargs}, {min_pos}, {max_args})) {{{{ goto exit; }}}} @@ -1406,7 +1142,7 @@ def parser_body( for i, p in enumerate(parameters): if p.is_vararg(): if fastcall: - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" %s = PyTuple_New(%s); if 
(!%s) {{ goto exit; @@ -1423,7 +1159,7 @@ def parser_body( max_pos ), indent=4)) else: - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" %s = PyTuple_GetSlice(%d, -1); """ % ( p.converter.parser_name, @@ -1439,12 +1175,12 @@ def parser_body( break if has_optional or p.is_optional(): has_optional = True - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" if (%s < %d) {{ goto skip_optional; }} """, indent=4) % (nargs, i + 1)) - parser_code.append(normalize_snippet(parsearg, indent=4)) + parser_code.append(libclinic.normalize_snippet(parsearg, indent=4)) if parser_code is not None: if has_optional: @@ -1455,7 +1191,7 @@ def parser_body( if fastcall: clinic.add_include('pycore_modsupport.h', '_PyArg_ParseStack()') - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (!_PyArg_ParseStack(args, nargs, "{format_units}:{name}", {parse_arguments})) {{ goto exit; @@ -1464,7 +1200,7 @@ def parser_body( else: flags = "METH_VARARGS" parser_prototype = self.PARSER_PROTOTYPE_VARARGS - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (!PyArg_ParseTuple(args, "{format_units}:{name}", {parse_arguments})) {{ goto exit; @@ -1481,13 +1217,16 @@ def parser_body( if p.deprecated_keyword: deprecated_keywords[i] = p - has_optional_kw = (max(pos_only, min_pos) + min_kw_only < len(converters) - int(vararg != NO_VARARG)) + has_optional_kw = ( + max(pos_only, min_pos) + min_kw_only + < len(converters) - int(vararg != self.NO_VARARG) + ) if limited_capi: parser_code = None fastcall = False else: - if vararg == NO_VARARG: + if vararg == self.NO_VARARG: clinic.add_include('pycore_modsupport.h', '_PyArg_UnpackKeywords()') args_declaration = "_PyArg_UnpackKeywords", "%s, %s, %s" % ( @@ -1516,7 +1255,7 @@ def parser_body( declarations += "\nPyObject *argsbuf[%s];" % len(converters) if has_optional_kw: declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (nargs, min_pos + min_kw_only) - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" args = %s(args, nargs, NULL, kwnames, &_parser, %s, argsbuf); if (!args) {{ goto exit; @@ -1534,7 +1273,7 @@ def parser_body( declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);" if has_optional_kw: declarations += "\nPy_ssize_t noptargs = %s + (kwargs ? 
PyDict_GET_SIZE(kwargs) : 0) - %d;" % (nargs, min_pos + min_kw_only) - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" fastargs = %s(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, %s, argsbuf); if (!fastargs) {{ goto exit; @@ -1567,19 +1306,19 @@ def parser_body( parser_code.append("%s:" % add_label) add_label = None if not p.is_optional(): - parser_code.append(normalize_snippet(parsearg, indent=4)) + parser_code.append(libclinic.normalize_snippet(parsearg, indent=4)) elif i < pos_only: add_label = 'skip_optional_posonly' - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" if (nargs < %d) {{ goto %s; }} """ % (i + 1, add_label), indent=4)) if has_optional_kw: - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" noptargs--; """, indent=4)) - parser_code.append(normalize_snippet(parsearg, indent=4)) + parser_code.append(libclinic.normalize_snippet(parsearg, indent=4)) else: if i < max_pos: label = 'skip_optional_pos' @@ -1587,24 +1326,24 @@ def parser_body( else: label = 'skip_optional_kwonly' first_opt = max_pos + min_kw_only - if vararg != NO_VARARG: + if vararg != self.NO_VARARG: first_opt += 1 if i == first_opt: add_label = label - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" if (!noptargs) {{ goto %s; }} """ % add_label, indent=4)) if i + 1 == len(parameters): - parser_code.append(normalize_snippet(parsearg, indent=4)) + parser_code.append(libclinic.normalize_snippet(parsearg, indent=4)) else: add_label = label - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(""" if (%s) {{ """ % (argname_fmt % i), indent=4)) - parser_code.append(normalize_snippet(parsearg, indent=8)) - parser_code.append(normalize_snippet(""" + parser_code.append(libclinic.normalize_snippet(parsearg, indent=8)) + parser_code.append(libclinic.normalize_snippet(""" if (!--noptargs) {{ goto %s; }} @@ -1623,7 +1362,7 @@ def parser_body( assert not fastcall flags = "METH_VARARGS|METH_KEYWORDS" parser_prototype = self.PARSER_PROTOTYPE_KEYWORD - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (!PyArg_ParseTupleAndKeywords(args, kwargs, "{format_units}:{name}", _keywords, {parse_arguments})) goto exit; @@ -1635,7 +1374,7 @@ def parser_body( elif fastcall: clinic.add_include('pycore_modsupport.h', '_PyArg_ParseStackAndKeywords()') - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma} {parse_arguments})) {{ goto exit; @@ -1644,7 +1383,7 @@ def parser_body( else: clinic.add_include('pycore_modsupport.h', '_PyArg_ParseTupleAndKeywordsFast()') - parser_code = [normalize_snippet(""" + parser_code = [libclinic.normalize_snippet(""" if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser, {parse_arguments})) {{ goto exit; @@ -1691,7 +1430,7 @@ def parser_body( declarations = '{base_type_ptr}' clinic.add_include('pycore_modsupport.h', '_PyArg_NoKeywords()') - fields.insert(0, normalize_snippet(""" + fields.insert(0, libclinic.normalize_snippet(""" if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{ goto exit; }} @@ -1699,7 +1438,7 @@ def parser_body( if not parses_positional: clinic.add_include('pycore_modsupport.h', '_PyArg_NoPositional()') - fields.insert(0, normalize_snippet(""" + fields.insert(0, libclinic.normalize_snippet(""" if 
({self_type_check}!_PyArg_NoPositional("{name}", args)) {{ goto exit; }} @@ -1809,7 +1548,7 @@ def render_option_group_parsing( # Clinic prefers groups on the left. So in the above example, # five arguments would map to B+C, not C+D. - add, output = text_accumulator() + out = [] parameters = list(f.parameters.values()) if isinstance(parameters[0].converter, self_converter): del parameters[0] @@ -1841,14 +1580,14 @@ def render_option_group_parsing( nargs = 'PyTuple_Size(args)' else: nargs = 'PyTuple_GET_SIZE(args)' - add(f"switch ({nargs}) {{\n") + out.append(f"switch ({nargs}) {{\n") for subset in permute_optional_groups(left, required, right): count = len(subset) count_min = min(count_min, count) count_max = max(count_max, count) if count == 0: - add(""" case 0: + out.append(""" case 0: break; """) continue @@ -1880,14 +1619,15 @@ def render_option_group_parsing( """ s = linear_format(s, group_booleans=lines) s = s.format_map(d) - add(s) + out.append(s) - add(" default:\n") + out.append(" default:\n") s = ' PyErr_SetString(PyExc_TypeError, "{} requires {} to {} arguments");\n' - add(s.format(f.full_name, count_min, count_max)) - add(' goto exit;\n') - add("}") - template_dict['option_group_parsing'] = format_escape(output()) + out.append(s.format(f.full_name, count_min, count_max)) + out.append(' goto exit;\n') + out.append("}") + + template_dict['option_group_parsing'] = libclinic.format_escape("".join(out)) def render_function( self, @@ -1897,7 +1637,6 @@ def render_function( if f is None or clinic is None: return "" - add, output = text_accumulator() data = CRenderData() assert f.parameters, "We should always have a 'self' at this point!" @@ -1985,8 +1724,7 @@ def render_function( template_dict['methoddef_name'] = f.c_basename.upper() + "_METHODDEF" template_dict['c_basename'] = f.c_basename - template_dict['docstring'] = self.docstring_for_c_string(f) - + template_dict['docstring'] = libclinic.docstring_for_c_string(f.docstring) template_dict['self_name'] = template_dict['self_type'] = template_dict['self_type_check'] = '' template_dict['target_critical_section'] = ', '.join(f.target_critical_section) for converter in converters: @@ -1999,7 +1737,7 @@ def render_function( else: template_dict['impl_return_type'] = f.return_converter.type - template_dict['declarations'] = format_escape("\n".join(data.declarations)) + template_dict['declarations'] = libclinic.format_escape("\n".join(data.declarations)) template_dict['initializers'] = "\n\n".join(data.initializers) template_dict['modifications'] = '\n\n'.join(data.modifications) template_dict['keywords_c'] = ' '.join('"' + k + '",' @@ -2015,9 +1753,11 @@ def render_function( template_dict['parse_arguments_comma'] = ''; template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) - template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) - template_dict['post_parsing'] = format_escape("".join(data.post_parsing).rstrip()) - template_dict['cleanup'] = format_escape("".join(data.cleanup)) + + template_dict['return_conversion'] = libclinic.format_escape("".join(data.return_conversion).rstrip()) + template_dict['post_parsing'] = libclinic.format_escape("".join(data.post_parsing).rstrip()) + template_dict['cleanup'] = libclinic.format_escape("".join(data.cleanup)) + template_dict['return_value'] = data.return_value template_dict['lock'] = "\n".join(data.lock) template_dict['unlock'] = "\n".join(data.unlock) @@ -2061,12 +1801,12 @@ def 
render_function( # mild hack: # reflow long impl declarations if name in {"impl_prototype", "impl_definition"}: - s = wrap_declarations(s) + s = libclinic.wrap_declarations(s) if clinic.line_prefix: - s = indent_all_lines(s, clinic.line_prefix) + s = libclinic.indent_all_lines(s, clinic.line_prefix) if clinic.line_suffix: - s = suffix_all_lines(s, clinic.line_suffix) + s = libclinic.suffix_all_lines(s, clinic.line_suffix) destination.append(s) @@ -2226,7 +1966,7 @@ def _line(self, lookahead: bool = False) -> str: return line def parse_verbatim_block(self) -> Block: - add, output = text_accumulator() + lines = [] self.block_start_line_number = self.line_number while self.input: @@ -2235,12 +1975,12 @@ def parse_verbatim_block(self) -> Block: if dsl_name: self.dsl_name = dsl_name break - add(line) + lines.append(line) - return Block(output()) + return Block("".join(lines)) def parse_clinic_block(self, dsl_name: str) -> Block: - input_add, input_output = text_accumulator() + in_lines = [] self.block_start_line_number = self.line_number + 1 stop_line = self.language.stop_line.format(dsl_name=dsl_name) body_prefix = self.language.body_prefix.format(dsl_name=dsl_name) @@ -2268,7 +2008,7 @@ def is_stop_line(line: str) -> bool: line = line.lstrip() assert line.startswith(body_prefix) line = line.removeprefix(body_prefix) - input_add(line) + in_lines.append(line) # consume output and checksum line, if present. if self.last_dsl_name == dsl_name: @@ -2282,7 +2022,7 @@ def is_stop_line(line: str) -> bool: assert checksum_re is not None # scan forward for checksum line - output_add, output_output = text_accumulator() + out_lines = [] arguments = None while self.input: line = self._line(lookahead=True) @@ -2290,12 +2030,12 @@ def is_stop_line(line: str) -> bool: arguments = match.group(1) if match else None if arguments: break - output_add(line) + out_lines.append(line) if self.is_start_line(line): break output: str | None - output = output_output() + output = "".join(out_lines) if arguments: d = {} for field in shlex.split(arguments): @@ -2323,7 +2063,7 @@ def is_stop_line(line: str) -> bool: self.input.extend(reversed(output_lines)) output = None - return Block(input_output(), dsl_name, output=output) + return Block("".join(in_lines), dsl_name, output=output) @dc.dataclass(slots=True, frozen=True) @@ -2351,6 +2091,9 @@ class BlockPrinter: language: Language f: io.StringIO = dc.field(default_factory=io.StringIO) + # '#include "header.h" // reason': column of '//' comment + INCLUDE_COMMENT_COLUMN: Final[int] = 35 + def print_block( self, block: Block, @@ -2406,7 +2149,7 @@ def print_block( line = f'#include "{include.filename}"' if include.reason: comment = f'// {include.reason}\n' - line = line.ljust(INCLUDE_COMMENT_COLUMN - 1) + comment + line = line.ljust(self.INCLUDE_COMMENT_COLUMN - 1) + comment output += line if current_condition: @@ -2443,26 +2186,26 @@ class BufferSeries: def __init__(self) -> None: self._start = 0 - self._array: list[_TextAccumulator] = [] - self._constructor = _text_accumulator + self._array: list[list[str]] = [] - def __getitem__(self, i: int) -> _TextAccumulator: + def __getitem__(self, i: int) -> list[str]: i -= self._start if i < 0: self._start += i - prefix = [self._constructor() for x in range(-i)] + prefix: list[list[str]] = [[] for x in range(-i)] self._array = prefix + self._array i = 0 while i >= len(self._array): - self._array.append(self._constructor()) + self._array.append([]) return self._array[i] def clear(self) -> None: for ta in self._array: - ta.text.clear() 
+ ta.clear() def dump(self) -> str: - texts = [ta.output() for ta in self._array] + texts = ["".join(ta) for ta in self._array] + self.clear() return "".join(texts) @@ -2648,7 +2391,7 @@ def __init__( 'impl_definition': d('block'), } - DestBufferType = dict[str, _TextAccumulator] + DestBufferType = dict[str, list[str]] DestBufferList = list[DestBufferType] self.destination_buffers_stack: DestBufferList = [] @@ -2720,7 +2463,7 @@ def get_destination_buffer( self, name: str, item: int = 0 - ) -> _TextAccumulator: + ) -> list[str]: d = self.get_destination(name) return d.buffers[item] @@ -3174,11 +2917,9 @@ def get_displayname(self, i: int) -> str: return f'argument {i}' def render_docstring(self) -> str: - add, out = text_accumulator() - add(f" {self.name}\n") - for line in self.docstring.split("\n"): - add(f" {line}\n") - return out().rstrip() + lines = [f" {self.name}"] + lines.extend(f" {line}" for line in self.docstring.split("\n")) + return "\n".join(lines).rstrip() CConverterClassT = TypeVar("CConverterClassT", bound=type["CConverter"]) @@ -3496,7 +3237,7 @@ def parse_argument(self, args: list[str]) -> None: args.append(self.converter) if self.encoding: - args.append(c_repr(self.encoding)) + args.append(libclinic.c_repr(self.encoding)) elif self.subclass_of: args.append(self.subclass_of) @@ -3674,8 +3415,8 @@ def set_template_dict(self, template_dict: TemplateDict) -> None: @property def parser_name(self) -> str: - if self.name in CLINIC_PREFIXED_ARGS: # bpo-39741 - return CLINIC_PREFIX + self.name + if self.name in libclinic.CLINIC_PREFIXED_ARGS: # bpo-39741 + return libclinic.CLINIC_PREFIX + self.name else: return self.name @@ -5241,13 +4982,6 @@ def reset(self) -> None: self.critical_section = False self.target_critical_section = [] - def directive_version(self, required: str) -> None: - global version - if version_comparator(version, required) < 0: - fail("Insufficient Clinic version!\n" - f" Version: {version}\n" - f" Required: {required}") - def directive_module(self, name: str) -> None: fields = name.split('.')[:-1] module, cls = self.clinic._module_and_class(fields) @@ -5846,11 +5580,11 @@ def parse_parameter(self, line: str) -> None: parameter_name = parameter.arg name, legacy, kwargs = self.parse_converter(parameter.annotation) + value: object if not default: if self.parameter_state is ParamState.OPTIONAL: fail(f"Can't have a parameter without a default ({parameter_name!r}) " "after a parameter with a default!") - value: Sentinels | Null if is_vararg: value = NULL kwargs.setdefault('c_default', "NULL") @@ -5964,7 +5698,7 @@ def bad_node(self, node: ast.AST) -> None: if isinstance(value, (bool, NoneType)): c_default = "Py_" + py_default elif isinstance(value, str): - c_default = c_repr(value) + c_default = libclinic.c_repr(value) else: c_default = py_default @@ -6251,12 +5985,15 @@ def state_function_docstring(self, line: str) -> None: def format_docstring_signature( self, f: Function, parameters: list[Parameter] ) -> str: - text, add, output = _text_accumulator() - add(f.displayname) + lines = [] + lines.append(f.displayname) if self.forced_text_signature: - add(self.forced_text_signature) + lines.append(self.forced_text_signature) + elif f.kind in {GETTER, SETTER}: + # @getter and @setter do not need signatures like a method or a function. + return '' else: - add('(') + lines.append('(') # populate "right_bracket_count" field for every parameter assert parameters, "We should always have a self parameter. 
" + repr(f) @@ -6310,7 +6047,7 @@ def fix_right_bracket_count(desired: int) -> str: first_parameter = True last_p = parameters[-1] - line_length = len(''.join(text)) + line_length = len(''.join(lines)) indent = " " * line_length def add_parameter(text: str) -> None: nonlocal line_length @@ -6321,12 +6058,11 @@ def add_parameter(text: str) -> None: else: s = ' ' + text if line_length + len(s) >= 72: - add('\n') - add(indent) + lines.extend(["\n", indent]) line_length = len(indent) s = text line_length += len(s) - add(s) + lines.append(s) for p in parameters: if not p.converter.show_in_signature: @@ -6349,8 +6085,7 @@ def add_parameter(text: str) -> None: added_star = True add_parameter('*,') - p_add, p_output = text_accumulator() - p_add(fix_right_bracket_count(p.right_bracket_count)) + p_lines = [fix_right_bracket_count(p.right_bracket_count)] if isinstance(p.converter, self_converter): # annotate first parameter as being a "self". @@ -6368,30 +6103,31 @@ def add_parameter(text: str) -> None: # have a docstring.) if this is an __init__ # (or __new__), then this signature is for # calling the class to construct a new instance. - p_add('$') + p_lines.append('$') if p.is_vararg(): - p_add("*") + p_lines.append("*") name = p.converter.signature_name or p.name - p_add(name) + p_lines.append(name) if not p.is_vararg() and p.converter.is_optional(): - p_add('=') + p_lines.append('=') value = p.converter.py_default if not value: value = repr(p.converter.default) - p_add(value) + p_lines.append(value) if (p != last_p) or need_a_trailing_slash: - p_add(',') + p_lines.append(',') - add_parameter(p_output()) + p_output = "".join(p_lines) + add_parameter(p_output) - add(fix_right_bracket_count(0)) + lines.append(fix_right_bracket_count(0)) if need_a_trailing_slash: add_parameter('/') - add(')') + lines.append(')') # PEP 8 says: # @@ -6403,13 +6139,13 @@ def add_parameter(text: str) -> None: # therefore this is commented out: # # if f.return_converter.py_default: - # add(' -> ') - # add(f.return_converter.py_default) + # lines.append(' -> ') + # lines.append(f.return_converter.py_default) if not f.docstring_only: - add("\n" + sig_end_marker + "\n") + lines.append("\n" + libclinic.SIG_END_MARKER + "\n") - signature_line = output() + signature_line = "".join(lines) # now fix up the places where the brackets look wrong return signature_line.replace(', ]', ',] ') @@ -6417,18 +6153,13 @@ def add_parameter(text: str) -> None: @staticmethod def format_docstring_parameters(params: list[Parameter]) -> str: """Create substitution text for {parameters}""" - add, output = text_accumulator() - for p in params: - if p.docstring: - add(p.render_docstring()) - add('\n') - return output() + return "".join(p.render_docstring() + "\n" for p in params if p.docstring) def format_docstring(self) -> str: assert self.function is not None f = self.function - if f.kind.new_or_init and not f.docstring: - # don't render a docstring at all, no signature, nothing. + # For the following special cases, it does not make sense to render a docstring. + if f.kind in {METHOD_INIT, METHOD_NEW, GETTER, SETTER} and not f.docstring: return f.docstring # Enforce the summary line! @@ -6540,7 +6271,7 @@ def create_cli() -> argparse.ArgumentParser: with writing argument parsing code for builtins and providing introspection signatures ("docstrings") for CPython builtins. 
-For more information see https://docs.python.org/3/howto/clinic.html""") +For more information see https://devguide.python.org/development-tools/clinic/""") cmdline.add_argument("-f", "--force", action='store_true', help="force output regeneration") cmdline.add_argument("-o", "--output", type=str, diff --git a/Tools/clinic/libclinic/__init__.py b/Tools/clinic/libclinic/__init__.py new file mode 100644 index 00000000000000..0c3c6840901a42 --- /dev/null +++ b/Tools/clinic/libclinic/__init__.py @@ -0,0 +1,46 @@ +from typing import Final + +from .formatting import ( + SIG_END_MARKER, + c_repr, + docstring_for_c_string, + format_escape, + indent_all_lines, + normalize_snippet, + pprint_words, + suffix_all_lines, + wrap_declarations, + wrapped_c_string_literal, +) + + +__all__ = [ + # Formatting helpers + "SIG_END_MARKER", + "c_repr", + "docstring_for_c_string", + "format_escape", + "indent_all_lines", + "normalize_snippet", + "pprint_words", + "suffix_all_lines", + "wrap_declarations", + "wrapped_c_string_literal", +] + + +CLINIC_PREFIX: Final = "__clinic_" +CLINIC_PREFIXED_ARGS: Final = frozenset( + { + "_keywords", + "_parser", + "args", + "argsbuf", + "fastargs", + "kwargs", + "kwnames", + "nargs", + "noptargs", + "return_value", + } +) diff --git a/Tools/clinic/libclinic/formatting.py b/Tools/clinic/libclinic/formatting.py new file mode 100644 index 00000000000000..8b3ad7ba566bc8 --- /dev/null +++ b/Tools/clinic/libclinic/formatting.py @@ -0,0 +1,173 @@ +"""A collection of string formatting helpers.""" + +import functools +import textwrap +from typing import Final + + +SIG_END_MARKER: Final = "--" + + +def docstring_for_c_string(docstring: str) -> str: + lines = [] + # Turn docstring into a properly quoted C string. + for line in docstring.split("\n"): + lines.append('"') + lines.append(_quoted_for_c_string(line)) + lines.append('\\n"\n') + + if lines[-2] == SIG_END_MARKER: + # If we only have a signature, add the blank line that the + # __text_signature__ getter expects to be there. + lines.append('"\\n"') + else: + lines.pop() + lines.append('"') + return "".join(lines) + + +def _quoted_for_c_string(text: str) -> str: + """Helper for docstring_for_c_string().""" + for old, new in ( + ("\\", "\\\\"), # must be first! + ('"', '\\"'), + ("'", "\\'"), + ): + text = text.replace(old, new) + return text + + +def c_repr(text: str) -> str: + return '"' + text + '"' + + +def wrapped_c_string_literal( + text: str, + *, + width: int = 72, + suffix: str = "", + initial_indent: int = 0, + subsequent_indent: int = 4 +) -> str: + wrapped = textwrap.wrap( + text, + width=width, + replace_whitespace=False, + drop_whitespace=False, + break_on_hyphens=False, + ) + separator = c_repr(suffix + "\n" + subsequent_indent * " ") + return initial_indent * " " + c_repr(separator.join(wrapped)) + + +def _add_prefix_and_suffix(text: str, *, prefix: str = "", suffix: str = "") -> str: + """Return 'text' with 'prefix' prepended and 'suffix' appended to all lines. + + If the last line is empty, it remains unchanged. + If text is blank, return text unchanged. + + (textwrap.indent only adds to non-blank lines.) 
+ """ + *split, last = text.split("\n") + lines = [prefix + line + suffix + "\n" for line in split] + if last: + lines.append(prefix + last + suffix) + return "".join(lines) + + +def indent_all_lines(text: str, prefix: str) -> str: + return _add_prefix_and_suffix(text, prefix=prefix) + + +def suffix_all_lines(text: str, suffix: str) -> str: + return _add_prefix_and_suffix(text, suffix=suffix) + + +def pprint_words(items: list[str]) -> str: + if len(items) <= 2: + return " and ".join(items) + return ", ".join(items[:-1]) + " and " + items[-1] + + +def _strip_leading_and_trailing_blank_lines(text: str) -> str: + lines = text.rstrip().split("\n") + while lines: + line = lines[0] + if line.strip(): + break + del lines[0] + return "\n".join(lines) + + +@functools.lru_cache() +def normalize_snippet(text: str, *, indent: int = 0) -> str: + """ + Reformats 'text': + * removes leading and trailing blank lines + * ensures that it does not end with a newline + * dedents so the first nonwhite character on any line is at column "indent" + """ + text = _strip_leading_and_trailing_blank_lines(text) + text = textwrap.dedent(text) + if indent: + text = textwrap.indent(text, " " * indent) + return text + + +def format_escape(text: str) -> str: + # double up curly-braces, this string will be used + # as part of a format_map() template later + text = text.replace("{", "{{") + text = text.replace("}", "}}") + return text + + +def wrap_declarations(text: str, length: int = 78) -> str: + """ + A simple-minded text wrapper for C function declarations. + + It views a declaration line as looking like this: + xxxxxxxx(xxxxxxxxx,xxxxxxxxx) + If called with length=30, it would wrap that line into + xxxxxxxx(xxxxxxxxx, + xxxxxxxxx) + (If the declaration has zero or one parameters, this + function won't wrap it.) + + If this doesn't work properly, it's probably better to + start from scratch with a more sophisticated algorithm, + rather than try and improve/debug this dumb little function. + """ + lines = [] + for line in text.split("\n"): + prefix, _, after_l_paren = line.partition("(") + if not after_l_paren: + lines.append(line) + continue + in_paren, _, after_r_paren = after_l_paren.partition(")") + if not _: + lines.append(line) + continue + if "," not in in_paren: + lines.append(line) + continue + parameters = [x.strip() + ", " for x in in_paren.split(",")] + prefix += "(" + if len(prefix) < length: + spaces = " " * len(prefix) + else: + spaces = " " * 4 + + while parameters: + line = prefix + first = True + while parameters: + if not first and (len(line) + len(parameters[0]) > length): + break + line += parameters.pop(0) + first = False + if not parameters: + line = line.rstrip(", ") + ")" + after_r_paren + lines.append(line.rstrip()) + prefix = spaces + return "\n".join(lines) diff --git a/Tools/freeze/README b/Tools/freeze/README index 9b3ea1f2c723b1..516077bc7daa89 100644 --- a/Tools/freeze/README +++ b/Tools/freeze/README @@ -218,6 +218,11 @@ source tree). It is possible to create frozen programs that don't have a console window, by specifying the option '-s windows'. See the Usage below. +Usage under macOS +----------------- + +On macOS the freeze tool is not supported for framework builds. 
+ Usage ----- diff --git a/Tools/freeze/freeze.py b/Tools/freeze/freeze.py index bc5e43f4853deb..de9772732cdb5d 100755 --- a/Tools/freeze/freeze.py +++ b/Tools/freeze/freeze.py @@ -136,6 +136,11 @@ def main(): makefile = 'Makefile' subsystem = 'console' + if sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK"): + print(f"{sys.argv[0]} cannot be used with framework builds of Python", file=sys.stderr) + sys.exit(1) + + # parse command line by first replacing any "-i" options with the # file contents. pos = 1 diff --git a/Tools/requirements-dev.txt b/Tools/requirements-dev.txt index 3a2e62f70bbb60..b89f86a35d6115 100644 --- a/Tools/requirements-dev.txt +++ b/Tools/requirements-dev.txt @@ -1,6 +1,6 @@ # Requirements file for external linters and checks we run on # Tools/clinic, Tools/cases_generator/, and Tools/peg_generator/ in CI -mypy==1.7.1 +mypy==1.8.0 # needed for peg_generator: types-psutil==5.9.5.17
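
A note on the new Tools/cases_generator/target_generator.py above: write_opcode_targets() builds the 256-entry computed-goto label table by filling every slot with the fallback label first and then patching in one "&&TARGET_<name>" entry per known opcode. A self-contained toy version of that table-building step (the opmap values below are invented for illustration; the real mapping comes from analyzer.Analysis):

    # Start with 256 copies of the fallback label, then patch in the label
    # for each opcode that fits in one byte, as write_opcode_targets() does.
    # The opcode numbers here are made up.
    opmap = {"NOP": 9, "RETURN_VALUE": 83}

    targets = ["&&_unknown_opcode,\n"] * 256
    for name, op in opmap.items():
        if op < 256:
            targets[op] = f"&&TARGET_{name},\n"

    header = "static void *opcode_targets[256] = {\n" + "".join(targets) + "};\n"
    print(header.splitlines()[0])  # static void *opcode_targets[256] = {
    assert targets[9] == "&&TARGET_NOP,\n"
    assert targets[0] == "&&_unknown_opcode,\n"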
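
Both generators that now call out.header_guard() (uop_id_generator.py and uop_metadata_generator.py) rely on a context manager defined in Tools/cases_generator/cwriter.py, which this diff does not show. A plausible sketch, assuming it simply mirrors the #ifndef/extern "C" boilerplate that the old uop_id_generator.py emitted by hand:

    import contextlib
    import io

    class _Writer:
        """Minimal stand-in for CWriter, for illustration only."""

        def __init__(self) -> None:
            self.out = io.StringIO()

        def emit(self, text: str) -> None:
            self.out.write(text)

        @contextlib.contextmanager
        def header_guard(self, name: str):
            # Prologue: include guard plus extern "C" wrapper.
            self.emit(f"#ifndef {name}\n#define {name}\n")
            self.emit('#ifdef __cplusplus\nextern "C" {\n#endif\n\n')
            yield
            # Epilogue: close the wrapper and the guard.
            self.emit('\n#ifdef __cplusplus\n}\n#endif\n')
            self.emit(f"#endif /* !{name} */\n")

    w = _Writer()
    with w.header_guard("Py_CORE_UOP_IDS_H"):
        w.emit("#define _EXIT_TRACE 1\n")
    print(w.out.getvalue())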
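
Finally, the string helpers relocated into Tools/clinic/libclinic/formatting.py keep the semantics of the functions deleted from clinic.py, and those semantics are easy to spot-check. The assertions below follow directly from the definitions added above; they assume libclinic is importable, for example when run from Tools/clinic/:

    import libclinic

    # _add_prefix_and_suffix() leaves a trailing blank line untouched, so
    # indent_all_lines() and suffix_all_lines() behave like the originals.
    assert libclinic.indent_all_lines("a\nb\n", "//") == "//a\n//b\n"
    assert libclinic.suffix_all_lines("a\nb", " \\") == "a \\\nb \\"

    # Joining used when naming parameters in deprecation messages.
    assert libclinic.pprint_words(["x", "y", "z"]) == "x, y and z"

    # Braces are doubled so the text survives a later str.format_map().
    assert libclinic.format_escape("{args}") == "{{args}}"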