diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 9e190d43b28ef9..30937353f5ea7a 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -45,6 +45,8 @@ Python/bytecodes.c @markshannon
Python/optimizer*.c @markshannon
Python/optimizer_analysis.c @Fidget-Spinner
Python/optimizer_bytecodes.c @Fidget-Spinner
+Python/partial_evaluator.c @Fidget-Spinner
+Python/partial_evaluator_bytecodes.c @Fidget-Spinner
Python/symtable.c @JelleZijlstra @carljm
Lib/_pyrepl/* @pablogsal @lysnikolaou @ambv
Lib/test/test_patma.py @brandtbucher
diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h
index 6d70b42f708854..d64562398a20e2 100644
--- a/Include/internal/pycore_optimizer.h
+++ b/Include/internal/pycore_optimizer.h
@@ -48,8 +48,9 @@ typedef struct {
* uint16_t error_target;
*/
typedef struct {
- uint16_t opcode:15;
+ uint16_t opcode:14;
uint16_t format:1;
+ uint16_t is_virtual:1; // Used for tier2 optimization.
uint16_t oparg;
union {
uint32_t target;
@@ -147,6 +148,15 @@ int _Py_uop_analyze_and_optimize(struct _PyInterpreterFrame *frame,
_PyUOpInstruction *trace, int trace_len, int curr_stackentries,
_PyBloomFilter *dependencies);
+int
+_Py_uop_partial_evaluate(
+ _PyInterpreterFrame *frame,
+ _PyUOpInstruction *buffer,
+ int length,
+ int curr_stacklen,
+ _PyBloomFilter *dependencies
+);
+
extern PyTypeObject _PyCounterExecutor_Type;
extern PyTypeObject _PyCounterOptimizer_Type;
extern PyTypeObject _PyDefaultOptimizer_Type;
@@ -156,6 +166,8 @@ extern PyTypeObject _PyUOpOptimizer_Type;
/* Symbols */
/* See explanation in optimizer_symbols.c */
+// Specializer.
+
struct _Py_UopsSymbol {
int flags; // 0 bits: Top; 2 or more bits: Bottom
PyTypeObject *typ; // Borrowed reference
@@ -285,6 +297,88 @@ static inline int is_terminator(const _PyUOpInstruction *uop)
);
}
+// Partial evaluator.
+struct _Py_UopsPESymbol {
+ int flags; // 0 bits: Top; 2 or more bits: Bottom
+ PyObject *const_val; // Owned reference (!)
+};
+
+typedef struct _Py_UopsPESymbol _Py_UopsPESymbol;
+
+typedef struct _Py_UopsPESlot {
+ _Py_UopsPESymbol *sym;
+ _PyUOpInstruction *origin_inst; // The instruction this symbol originates from.
+} _Py_UopsPESlot;
+
+struct _Py_UOpsPEAbstractFrame {
+ // Max stacklen
+ int stack_len;
+ int locals_len;
+
+ _Py_UopsPESlot *stack_pointer;
+ _Py_UopsPESlot *stack;
+ _Py_UopsPESlot *locals;
+};
+
+typedef struct _Py_UOpsPEAbstractFrame _Py_UOpsPEAbstractFrame;
+
+typedef struct pe_arena {
+ int sym_curr_number;
+ int sym_max_number;
+ _Py_UopsPESymbol arena[TY_ARENA_SIZE];
+} pe_arena;
+
+struct _Py_UOpsPEContext {
+ char done;
+ char out_of_space;
+ bool contradiction;
+ // The current "executing" frame.
+ _Py_UOpsPEAbstractFrame *frame;
+ _Py_UOpsPEAbstractFrame frames[MAX_ABSTRACT_FRAME_DEPTH];
+ int curr_frame_depth;
+
+ // Arena for the symbolic information.
+ pe_arena sym_arena;
+
+ _Py_UopsPESlot *n_consumed;
+ _Py_UopsPESlot *limit;
+ _Py_UopsPESlot locals_and_stack[MAX_ABSTRACT_INTERP_SIZE];
+};
+
+typedef struct _Py_UOpsPEContext _Py_UOpsPEContext;
+
+extern bool _Py_uop_pe_sym_is_null(_Py_UopsPESlot *sym);
+extern bool _Py_uop_pe_sym_is_not_null(_Py_UopsPESlot *sym);
+extern bool _Py_uop_pe_sym_is_const(_Py_UopsPESlot *sym);
+extern PyObject *_Py_uop_pe_sym_get_const(_Py_UopsPESlot *sym);
+extern _Py_UopsPESlot _Py_uop_pe_sym_new_unknown(_Py_UOpsPEContext *ctx);
+extern _Py_UopsPESlot _Py_uop_pe_sym_new_not_null(_Py_UOpsPEContext *ctx);
+extern _Py_UopsPESlot _Py_uop_pe_sym_new_const(_Py_UOpsPEContext *ctx, PyObject *const_val);
+extern _Py_UopsPESlot _Py_uop_pe_sym_new_null(_Py_UOpsPEContext *ctx);
+extern void _Py_uop_pe_sym_set_null(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym);
+extern void _Py_uop_pe_sym_set_non_null(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym);
+extern void _Py_uop_pe_sym_set_const(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym, PyObject *const_val);
+extern bool _Py_uop_pe_sym_is_bottom(_Py_UopsPESlot *sym);
+extern int _Py_uop_pe_sym_truthiness(_Py_UopsPESlot *sym);
+extern void _Py_uop_sym_set_origin_inst_override(_Py_UopsPESlot *sym, _PyUOpInstruction *origin);
+extern _PyUOpInstruction *_Py_uop_sym_get_origin(_Py_UopsPESlot *sym);
+extern bool _Py_uop_sym_is_virtual(_Py_UopsPESlot *sym);
+
+
+extern _Py_UOpsPEAbstractFrame *
+_Py_uop_pe_frame_new(
+ _Py_UOpsPEContext *ctx,
+ PyCodeObject *co,
+ int curr_stackentries,
+ _Py_UopsPESlot *args,
+ int arg_len);
+
+int _Py_uop_pe_frame_pop(_Py_UOpsPEContext *ctx);
+
+extern void _Py_uop_pe_abstractcontext_init(_Py_UOpsPEContext *ctx);
+extern void _Py_uop_pe_abstractcontext_fini(_Py_UOpsPEContext *ctx);
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index 4cf9b66170c055..696c318c4d91aa 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -1482,6 +1482,33 @@ def fn(a):
fn(A())
+ def test_pe_load_fast_pop_top(self):
+ def thing(a):
+ x = 0
+ for i in range(TIER2_THRESHOLD):
+ i
+ return i
+
+
+ res, ex = self._run_with_optimizer(thing, 1)
+ self.assertEqual(res, 4095)
+ self.assertIsNotNone(ex)
+ self.assertEqual(list(iter_opnames(ex)).count("_POP_TOP"), 0)
+ self.assertTrue(ex.is_valid())
+
+ def test_pe_dead_store_elimination(self):
+ def thing(a):
+ x = 0
+ for i in range(TIER2_THRESHOLD):
+ x = x
+ return i
+
+
+ res, ex = self._run_with_optimizer(thing, 1)
+ self.assertEqual(res, 4095)
+ self.assertIsNotNone(ex)
+ self.assertEqual(list(iter_opnames(ex)).count("_LOAD_FAST_1"), 0)
+ self.assertTrue(ex.is_valid())
def test_func_guards_removed_or_reduced(self):
def testfunc(n):
for i in range(n):
diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 66862ec17cca98..8fe3b07cc32c7e 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -38,6 +38,7 @@ def skip_if_different_mount_drives():
import tier1_generator
import opcode_metadata_generator
import optimizer_generator
+ import partial_evaluator_generator
def handle_stderr():
@@ -1641,6 +1642,9 @@ def test_escaping_call_next_to_cmacro(self):
class TestGeneratedAbstractCases(unittest.TestCase):
+
+ generator = None
+
def setUp(self) -> None:
super().setUp()
self.maxDiff = None
@@ -1676,7 +1680,8 @@ def run_cases_test(self, input: str, input2: str, expected: str):
temp_input.flush()
with handle_stderr():
- optimizer_generator.generate_tier2_abstract_from_files(
+ assert self.generator is not None
+ self.generator.generate_tier2_abstract_from_files(
[self.temp_input_filename, self.temp_input2_filename],
self.temp_output_filename
)
@@ -1690,6 +1695,9 @@ def run_cases_test(self, input: str, input2: str, expected: str):
actual = "".join(lines)
self.assertEqual(actual.strip(), expected.strip())
+
+class TestGeneratedOptimizerCases(TestGeneratedAbstractCases):
+ generator = optimizer_generator
def test_overridden_abstract(self):
input = """
pure op(OP, (--)) {
@@ -1790,5 +1798,166 @@ def test_missing_override_failure(self):
self.run_cases_test(input, input2, output)
+class TestGeneratedPECases(TestGeneratedAbstractCases):
+ generator = partial_evaluator_generator
+
+ def test_overridden_abstract(self):
+ input = """
+ pure op(OP, (--)) {
+ SPAM();
+ }
+ """
+ input2 = """
+ pure op(OP, (--)) {
+ eggs();
+ }
+ """
+ output = """
+ case OP: {
+ eggs();
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_overridden_abstract_args(self):
+ input = """
+ pure op(OP, (arg1 -- out)) {
+ out = SPAM(arg1);
+ }
+ op(OP2, (arg1 -- out)) {
+ out = EGGS(arg1);
+ }
+ """
+ input2 = """
+ op(OP, (arg1 -- out)) {
+ out = EGGS(arg1);
+ }
+ """
+ output = """
+ case OP: {
+ _Py_UopsPESlot arg1;
+ _Py_UopsPESlot out;
+ arg1 = stack_pointer[-1];
+ arg1 = stack_pointer[-1];
+ out = EGGS(arg1);
+ stack_pointer[-1] = out;
+ break;
+ }
+
+ case OP2: {
+ _Py_UopsPESlot arg1;
+ _Py_UopsPESlot out;
+ MATERIALIZE_INST();
+ arg1 = stack_pointer[-1];
+ materialize(&arg1);
+ out = sym_new_not_null(ctx);
+ stack_pointer[-1] = out;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_no_overridden_case(self):
+ input = """
+ pure op(OP, (arg1 -- out)) {
+ out = SPAM(arg1);
+ }
+
+ pure op(OP2, (arg1 -- out)) {
+ }
+
+ """
+ input2 = """
+ pure op(OP2, (arg1 -- out)) {
+ out = NULL;
+ }
+ """
+ output = """
+ case OP: {
+ _Py_UopsPESlot arg1;
+ _Py_UopsPESlot out;
+ MATERIALIZE_INST();
+ arg1 = stack_pointer[-1];
+ materialize(&arg1);
+ out = sym_new_not_null(ctx);
+ stack_pointer[-1] = out;
+ break;
+ }
+
+ case OP2: {
+ _Py_UopsPESlot arg1;
+ _Py_UopsPESlot out;
+ arg1 = stack_pointer[-1];
+ out = NULL;
+ stack_pointer[-1] = out;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_missing_override_failure(self):
+ input = """
+ pure op(OP, (arg1 -- out)) {
+ SPAM();
+ }
+ """
+ input2 = """
+ pure op(OTHER, (arg1 -- out)) {
+ }
+ """
+ output = """
+ """
+ with self.assertRaisesRegex(AssertionError, "All abstract uops"):
+ self.run_cases_test(input, input2, output)
+
+
+ def test_validate_inputs(self):
+ input = """
+ pure op(OP, (arg1 --)) {
+ SPAM();
+ }
+ """
+ input2 = """
+ // Non-matching input!
+ pure op(OP, (arg1, arg2 --)) {
+ }
+ """
+ output = """
+ """
+ with self.assertRaisesRegex(AssertionError, "input length don't match"):
+ self.run_cases_test(input, input2, output)
+
+ def test_materialize_inputs(self):
+ input = """
+ pure op(OP2, (arg1, arg2, arg3[oparg] --)) {
+ }
+ """
+ input2 = """
+ pure op(OP2, (arg1, arg2, arg3[oparg] --)) {
+ MATERIALIZE_INPUTS();
+ }
+ """
+ output = """
+ case OP2: {
+ _Py_UopsPESlot *arg3;
+ _Py_UopsPESlot arg2;
+ _Py_UopsPESlot arg1;
+ arg3 = &stack_pointer[-2 - oparg];
+ arg2 = stack_pointer[-2];
+ arg1 = stack_pointer[-1];
+ materialize(&arg1);
+ materialize(&arg2);
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&arg3[_i]);
+ }
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Makefile.pre.in b/Makefile.pre.in
index dd8a3ab82eacd2..fd97c855a8834e 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -474,6 +474,8 @@ PYTHON_OBJS= \
Python/optimizer_analysis.o \
Python/optimizer_symbols.o \
Python/parking_lot.o \
+ Python/partial_evaluator.o \
+ Python/partial_evaluator_symbols.o \
Python/pathconfig.o \
Python/preconfig.o \
Python/pyarena.o \
@@ -1985,7 +1987,7 @@ Objects/mimalloc/page.o: $(srcdir)/Objects/mimalloc/page-queue.c
regen-cases: \
regen-opcode-ids regen-opcode-targets regen-uop-ids regen-opcode-metadata-py \
regen-generated-cases regen-executor-cases regen-optimizer-cases \
- regen-opcode-metadata regen-uop-metadata
+ regen-partial-evaluator-cases regen-opcode-metadata regen-uop-metadata
.PHONY: regen-opcode-ids
regen-opcode-ids:
@@ -2031,6 +2033,15 @@ regen-optimizer-cases:
$(srcdir)/Python/bytecodes.c
$(UPDATE_FILE) $(srcdir)/Python/optimizer_cases.c.h $(srcdir)/Python/optimizer_cases.c.h.new
+.PHONY: regen-partial-evaluator-cases
+regen-partial-evaluator-cases:
+ $(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/partial_evaluator_generator.py \
+ -o $(srcdir)/Python/partial_evaluator_cases.c.h.new \
+ $(srcdir)/Python/partial_evaluator_bytecodes.c \
+ $(srcdir)/Python/bytecodes.c
+ $(UPDATE_FILE) $(srcdir)/Python/partial_evaluator_cases.c.h $(srcdir)/Python/partial_evaluator_cases.c.h.new
+
+
.PHONY: regen-opcode-metadata
regen-opcode-metadata:
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/cases_generator/opcode_metadata_generator.py \
@@ -2068,7 +2079,8 @@ Python/optimizer.o: \
Python/optimizer_analysis.o: \
$(srcdir)/Include/internal/pycore_opcode_metadata.h \
$(srcdir)/Include/internal/pycore_optimizer.h \
- $(srcdir)/Python/optimizer_cases.c.h
+ $(srcdir)/Python/optimizer_cases.c.h \
+ $(srcdir)/Python/partial_evaluator_cases.c.h
Python/frozen.o: $(FROZEN_FILES_OUT)
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-03-24-58.gh-issue-120619.u8o8oB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-03-24-58.gh-issue-120619.u8o8oB.rst
new file mode 100644
index 00000000000000..21f7baf43e2c99
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-03-24-58.gh-issue-120619.u8o8oB.rst
@@ -0,0 +1 @@
+Set up a JIT optimizer partial evaluation pass. Patch by Ken Jin.
diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj
index 51b493f8a84c6f..b61025ca03e4ad 100644
--- a/PCbuild/_freeze_module.vcxproj
+++ b/PCbuild/_freeze_module.vcxproj
@@ -240,6 +240,8 @@
+
+
diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters
index 3842f52e514bb4..4d7485e4edfe32 100644
--- a/PCbuild/_freeze_module.vcxproj.filters
+++ b/PCbuild/_freeze_module.vcxproj.filters
@@ -331,6 +331,12 @@
Source Files
+
+ Source Files
+
+
+ Source Files
+
Source Files
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 9ebf58ae8a9bc4..8695cec18295ce 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -629,6 +629,8 @@
+
+
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 6c76a6ab592a84..dbefef95428de0 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -1421,6 +1421,12 @@
Python
+
+ Python
+
+
+ Python
+
Python
diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets
index 416241d9d0df10..8ea0ea1f8e3717 100644
--- a/PCbuild/regen.targets
+++ b/PCbuild/regen.targets
@@ -104,6 +104,8 @@
WorkingDirectory="$(PySourcePath)" />
+
= 2) {
printf("SIDE EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
- printf(", exit %u, temp %d, target %d -> %s]\n",
+ printf(", exit %ld, temp %d, target %d -> %s]\n",
exit - current_executor->exits, exit->temperature.value_and_backoff,
(int)(target - _PyFrame_GetBytecode(frame)),
_PyOpcode_OpName[target->op.code]);
@@ -4977,7 +4977,7 @@ dummy_func(
if (lltrace >= 2) {
printf("DYNAMIC EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
- printf(", exit %u, temp %d, target %d -> %s]\n",
+ printf(", exit %ld, temp %d, target %d -> %s]\n",
exit - current_executor->exits, exit->temperature.value_and_backoff,
(int)(target - _PyFrame_GetBytecode(frame)),
_PyOpcode_OpName[target->op.code]);
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 987ff2e6419669..565d0121ec93e0 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -5757,7 +5757,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
printf("SIDE EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
- printf(", exit %u, temp %d, target %d -> %s]\n",
+ printf(", exit %ld, temp %d, target %d -> %s]\n",
exit - current_executor->exits, exit->temperature.value_and_backoff,
(int)(target - _PyFrame_GetBytecode(frame)),
_PyOpcode_OpName[target->op.code]);
@@ -5946,7 +5946,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
printf("DYNAMIC EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
- printf(", exit %u, temp %d, target %d -> %s]\n",
+ printf(", exit %ld, temp %d, target %d -> %s]\n",
exit - current_executor->exits, exit->temperature.value_and_backoff,
(int)(target - _PyFrame_GetBytecode(frame)),
_PyOpcode_OpName[target->op.code]);
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 6a232218981dcd..091a2be8f5e21e 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -1263,6 +1263,10 @@ uop_optimize(
if (length <= 0) {
return length;
}
+ length = _Py_uop_partial_evaluate(frame, buffer, length, curr_stackentries, &dependencies);
+ if (length <= 0) {
+ return length;
+ }
}
assert(length < UOP_MAX_TRACE_LENGTH);
assert(length >= 1);
diff --git a/Python/optimizer_symbols.c b/Python/optimizer_symbols.c
index 40cbf95e3d6d39..78b83737a2bd5d 100644
--- a/Python/optimizer_symbols.c
+++ b/Python/optimizer_symbols.c
@@ -55,7 +55,7 @@ static _Py_UopsSymbol NO_SPACE_SYMBOL = {
.type_version = 0,
};
-_Py_UopsSymbol *
+static _Py_UopsSymbol *
out_of_space(_Py_UOpsContext *ctx)
{
ctx->done = true;
diff --git a/Python/partial_evaluator.c b/Python/partial_evaluator.c
new file mode 100644
index 00000000000000..89cf2a43ceeaa5
--- /dev/null
+++ b/Python/partial_evaluator.c
@@ -0,0 +1,313 @@
+#ifdef _Py_TIER2
+
+/*
+ * This file contains the support code for CPython's partial evaluator.
+ * It performs an abstract interpretation[1] over the trace of uops.
+ * Using the information gained, it chooses to emit, or skip certain instructions
+ * if possible.
+ *
+ * [1] For information on abstract interpretation and partial evaluation, please see
+ * https://en.wikipedia.org/wiki/Abstract_interpretation
+ * https://en.wikipedia.org/wiki/Partial_evaluation
+ *
+ * */
+#include "Python.h"
+#include "opcode.h"
+#include "pycore_dict.h"
+#include "pycore_interp.h"
+#include "pycore_opcode_metadata.h"
+#include "pycore_opcode_utils.h"
+#include "pycore_pystate.h" // _PyInterpreterState_GET()
+#include "pycore_uop_metadata.h"
+#include "pycore_dict.h"
+#include "pycore_long.h"
+#include "pycore_optimizer.h"
+#include "pycore_object.h"
+#include "pycore_dict.h"
+#include "pycore_function.h"
+#include "pycore_uop_metadata.h"
+#include "pycore_uop_ids.h"
+#include "pycore_range.h"
+
+#include
+#include
+#include
+#include
+
+#ifdef Py_DEBUG
+ extern const char *_PyUOpName(int index);
+ extern void _PyUOpPrint(const _PyUOpInstruction *uop);
+ static const char *const DEBUG_ENV = "PYTHON_OPT_DEBUG";
+ static inline int get_lltrace(void) {
+ char *uop_debug = Py_GETENV(DEBUG_ENV);
+ int lltrace = 0;
+ if (uop_debug != NULL && *uop_debug >= '0') {
+ lltrace = *uop_debug - '0'; // TODO: Parse an int and all that
+ }
+ return lltrace;
+ }
+ #define DPRINTF(level, ...) \
+ if (get_lltrace() >= (level)) { printf(__VA_ARGS__); }
+#else
+ #define DPRINTF(level, ...)
+#endif
+
+#define STACK_LEVEL() ((int)(stack_pointer - ctx->frame->stack))
+#define STACK_SIZE() ((int)(ctx->frame->stack_len))
+
+#define WITHIN_STACK_BOUNDS() \
+ (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE())
+
+
+#define GETLOCAL(idx) ((ctx->frame->locals[idx]))
+
+
+/* _PUSH_FRAME/_RETURN_VALUE's operand can be 0, a PyFunctionObject *, or a
+ * PyCodeObject *. Retrieve the code object if possible.
+ */
+static PyCodeObject *
+get_code(_PyUOpInstruction *op)
+{
+ assert(op->opcode == _PUSH_FRAME || op->opcode == _RETURN_VALUE || op->opcode == _RETURN_GENERATOR);
+ PyCodeObject *co = NULL;
+ uint64_t operand = op->operand0;
+ if (operand == 0) {
+ return NULL;
+ }
+ if (operand & 1) {
+ co = (PyCodeObject *)(operand & ~1);
+ }
+ else {
+ PyFunctionObject *func = (PyFunctionObject *)operand;
+ assert(PyFunction_Check(func));
+ co = (PyCodeObject *)func->func_code;
+ }
+ assert(PyCode_Check(co));
+ return co;
+}
+
+static PyCodeObject *
+get_code_with_logging(_PyUOpInstruction *op)
+{
+ PyCodeObject *co = NULL;
+ uint64_t push_operand = op->operand0;
+ if (push_operand & 1) {
+ co = (PyCodeObject *)(push_operand & ~1);
+ DPRINTF(3, "code=%p ", co);
+ assert(PyCode_Check(co));
+ }
+ else {
+ PyFunctionObject *func = (PyFunctionObject *)push_operand;
+ DPRINTF(3, "func=%p ", func);
+ if (func == NULL) {
+ DPRINTF(3, "\n");
+ DPRINTF(1, "Missing function\n");
+ return NULL;
+ }
+ co = (PyCodeObject *)func->func_code;
+ DPRINTF(3, "code=%p ", co);
+ }
+ return co;
+}
+
+#define sym_is_not_null _Py_uop_pe_sym_is_not_null
+#define sym_is_const _Py_uop_pe_sym_is_const
+#define sym_get_const _Py_uop_pe_sym_get_const
+#define sym_new_unknown _Py_uop_pe_sym_new_unknown
+#define sym_new_not_null _Py_uop_pe_sym_new_not_null
+#define sym_is_null _Py_uop_pe_sym_is_null
+#define sym_new_const _Py_uop_pe_sym_new_const
+#define sym_new_null _Py_uop_pe_sym_new_null
+#define sym_set_null(SYM) _Py_uop_pe_sym_set_null(ctx, SYM)
+#define sym_set_non_null(SYM) _Py_uop_pe_sym_set_non_null(ctx, SYM)
+#define sym_set_const(SYM, CNST) _Py_uop_pe_sym_set_const(ctx, SYM, CNST)
+#define sym_is_bottom _Py_uop_pe_sym_is_bottom
+#define frame_new _Py_uop_pe_frame_new
+#define frame_pop _Py_uop_pe_frame_pop
+
+#define MATERIALIZE_INST() (this_instr->is_virtual = false)
+#define sym_set_origin_inst_override _Py_uop_sym_set_origin_inst_override
+#define sym_is_virtual _Py_uop_sym_is_virtual
+#define sym_get_origin _Py_uop_sym_get_origin
+
+static void
+materialize(_Py_UopsPESlot *slot)
+{
+ assert(slot != NULL);
+ if (slot->origin_inst) {
+ slot->origin_inst->is_virtual = false;
+ }
+}
+
+static void
+materialize_stack(_Py_UopsPESlot *stack_start, _Py_UopsPESlot *stack_end)
+{
+ while (stack_start < stack_end) {
+ materialize(stack_start);
+ stack_start++;
+ }
+}
+
+static void
+materialize_frame(_Py_UOpsPEAbstractFrame *frame)
+{
+ materialize_stack(frame->stack, frame->stack_pointer);
+}
+
+static void
+materialize_ctx(_Py_UOpsPEContext *ctx)
+{
+ for (int i = 0; i < ctx->curr_frame_depth; i++) {
+ materialize_frame(&ctx->frames[i]);
+ }
+}
+
+/* 1 for success, 0 for not ready, cannot error at the moment. */
+static int
+partial_evaluate_uops(
+ PyCodeObject *co,
+ _PyUOpInstruction *trace,
+ int trace_len,
+ int curr_stacklen,
+ _PyBloomFilter *dependencies
+)
+{
+ _PyUOpInstruction trace_dest[UOP_MAX_TRACE_LENGTH];
+ _Py_UOpsPEContext context;
+ _Py_UOpsPEContext *ctx = &context;
+ uint32_t opcode = UINT16_MAX;
+ int curr_space = 0;
+ int max_space = 0;
+ _PyUOpInstruction *first_valid_check_stack = NULL;
+ _PyUOpInstruction *corresponding_check_stack = NULL;
+
+ _Py_uop_pe_abstractcontext_init(ctx);
+ _Py_UOpsPEAbstractFrame *frame = _Py_uop_pe_frame_new(ctx, co, curr_stacklen, NULL, 0);
+ if (frame == NULL) {
+ return -1;
+ }
+ ctx->curr_frame_depth++;
+ ctx->frame = frame;
+ ctx->done = false;
+ ctx->out_of_space = false;
+ ctx->contradiction = false;
+
+ for (int i = 0; i < trace_len; i++) {
+ // The key part of PE --- we assume everything starts off virtual.
+ trace_dest[i] = trace[i];
+ trace_dest[i].is_virtual = true;
+ }
+
+ _PyUOpInstruction *this_instr = NULL;
+ int i = 0;
+ for (; !ctx->done; i++) {
+ assert(i < trace_len);
+ this_instr = &trace_dest[i];
+
+ int oparg = this_instr->oparg;
+ opcode = this_instr->opcode;
+ _Py_UopsPESlot *stack_pointer = ctx->frame->stack_pointer;
+
+#ifdef Py_DEBUG
+ if (get_lltrace() >= 3) {
+ printf("%4d pe: ", (int)(this_instr - trace_dest));
+ _PyUOpPrint(this_instr);
+ printf(" ");
+ }
+#endif
+
+ switch (opcode) {
+
+#include "partial_evaluator_cases.c.h"
+
+ default:
+ DPRINTF(1, "\nUnknown opcode in pe's abstract interpreter\n");
+ Py_UNREACHABLE();
+ }
+ assert(ctx->frame != NULL);
+ DPRINTF(3, " stack_level %d\n", STACK_LEVEL());
+ ctx->frame->stack_pointer = stack_pointer;
+ assert(STACK_LEVEL() >= 0);
+ if (ctx->done) {
+ break;
+ }
+ }
+ if (ctx->out_of_space) {
+ DPRINTF(3, "\n");
+ DPRINTF(1, "Out of space in pe's abstract interpreter\n");
+ }
+ if (ctx->contradiction) {
+ // Attempted to push a "bottom" (contradiction) symbol onto the stack.
+ // This means that the abstract interpreter has hit unreachable code.
+ // We *could* generate an _EXIT_TRACE or _FATAL_ERROR here, but hitting
+ // bottom indicates type instability, so we are probably better off
+ // retrying later.
+ DPRINTF(3, "\n");
+ DPRINTF(1, "Hit bottom in pe's abstract interpreter\n");
+ _Py_uop_pe_abstractcontext_fini(ctx);
+ return 0;
+ }
+
+ if (ctx->out_of_space || !is_terminator(this_instr)) {
+ _Py_uop_pe_abstractcontext_fini(ctx);
+ return trace_len;
+ }
+ else {
+ // We MUST not have bailed early here.
+ // That's the only time the PE's residual is valid.
+ assert(is_terminator(this_instr));
+
+ // Copy trace_dest into trace.
+ int trace_dest_len = trace_len;
+ // Only valid before we start inserting side exits.
+ assert(trace_dest_len == trace_len);
+ for (int x = 0; x < trace_dest_len; x++) {
+ // Skip all virtual instructions.
+ if (trace_dest[x].is_virtual) {
+ trace[x].opcode = _NOP;
+ }
+ else {
+ trace[x] = trace_dest[x];
+ }
+ }
+ _Py_uop_pe_abstractcontext_fini(ctx);
+ return trace_dest_len;
+ }
+
+error:
+ DPRINTF(3, "\n");
+ DPRINTF(1, "Encountered error in pe's abstract interpreter\n");
+ if (opcode <= MAX_UOP_ID) {
+ OPT_ERROR_IN_OPCODE(opcode);
+ }
+ _Py_uop_pe_abstractcontext_fini(ctx);
+ return -1;
+
+}
+
+
+// 0 - failure, no error raised, just fall back to Tier 1
+// -1 - failure, and raise error
+// > 0 - length of optimized trace
+int
+_Py_uop_partial_evaluate(
+ _PyInterpreterFrame *frame,
+ _PyUOpInstruction *buffer,
+ int length,
+ int curr_stacklen,
+ _PyBloomFilter *dependencies
+)
+{
+
+ length = partial_evaluate_uops(
+ _PyFrame_GetCode(frame), buffer,
+ length, curr_stacklen, dependencies);
+
+ if (length <= 0) {
+ return length;
+ }
+
+ return length;
+}
+
+#endif /* _Py_TIER2 */
diff --git a/Python/partial_evaluator_bytecodes.c b/Python/partial_evaluator_bytecodes.c
new file mode 100644
index 00000000000000..f231a0364b0d28
--- /dev/null
+++ b/Python/partial_evaluator_bytecodes.c
@@ -0,0 +1,351 @@
+#include "Python.h"
+#include "pycore_optimizer.h"
+#include "pycore_uops.h"
+#include "pycore_uop_ids.h"
+#include "internal/pycore_moduleobject.h"
+
+#define op(name, ...) /* NAME is ignored */
+
+typedef struct _Py_UopsPESymbol _Py_UopsPESymbol;
+typedef struct _Py_UOpsPEContext _Py_UOpsPEContext;
+typedef struct _Py_UOpsPEAbstractFrame _Py_UOpsPEAbstractFrame;
+
+/* Shortened forms for convenience */
+#define sym_is_not_null _Py_uop_pe_sym_is_not_null
+#define sym_is_const _Py_uop_pe_sym_is_const
+#define sym_get_const _Py_uop_pe_sym_get_const
+#define sym_new_unknown _Py_uop_pe_sym_new_unknown
+#define sym_new_not_null _Py_uop_pe_sym_new_not_null
+#define sym_is_null _Py_uop_pe_sym_is_null
+#define sym_new_const _Py_uop_pe_sym_new_const
+#define sym_new_null _Py_uop_pe_sym_new_null
+#define sym_set_null(SYM) _Py_uop_pe_sym_set_null(ctx, SYM)
+#define sym_set_non_null(SYM) _Py_uop_pe_sym_set_non_null(ctx, SYM)
+#define sym_set_const(SYM, CNST) _Py_uop_pe_sym_set_const(ctx, SYM, CNST)
+#define sym_is_bottom _Py_uop_pe_sym_is_bottom
+#define frame_new _Py_uop_pe_frame_new
+#define frame_pop _Py_uop_pe_frame_pop
+
+extern PyCodeObject *get_code(_PyUOpInstruction *op);
+
+static int
+dummy_func(void) {
+
+// BEGIN BYTECODES //
+
+ op(_LOAD_FAST_CHECK, (-- value)) {
+ MATERIALIZE_INST();
+ value = GETLOCAL(oparg);
+ // We guarantee this will error - just bail and don't optimize it.
+ if (sym_is_null(&value)) {
+ ctx->done = true;
+ }
+ }
+
+ op(_LOAD_FAST, (-- value)) {
+ value = GETLOCAL(oparg);
+ sym_set_origin_inst_override(&value, this_instr);
+ }
+
+ op(_LOAD_FAST_AND_CLEAR, (-- value)) {
+ MATERIALIZE_INST();
+ value = GETLOCAL(oparg);
+ GETLOCAL(oparg) = sym_new_null(ctx);
+ sym_set_origin_inst_override(&value, this_instr);
+ }
+
+ op(_LOAD_CONST, (-- value)) {
+ // Should've all been converted by specializer.
+ Py_UNREACHABLE();
+ // Just to please the code generator that value is defined.
+ value = sym_new_const(ctx, NULL);
+ }
+
+ op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
+ MATERIALIZE_INST();
+ value = sym_new_const(ctx, ptr);
+ sym_set_origin_inst_override(&value, this_instr);
+ }
+
+ op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
+ MATERIALIZE_INST();
+ value = sym_new_const(ctx, ptr);
+ sym_set_origin_inst_override(&value, this_instr);
+ }
+
+ op(_STORE_FAST, (value --)) {
+ _PyUOpInstruction *origin = sym_get_origin(&value);
+ // Gets rid of things like x = x.
+ if (sym_is_virtual(&value) &&
+ origin != NULL &&
+ origin->opcode == _LOAD_FAST &&
+ origin->oparg == oparg) {
+ // Leave it as virtual.
+ }
+ else {
+ materialize(&value);
+ MATERIALIZE_INST();
+ GETLOCAL(oparg) = value;
+ }
+
+ }
+
+ op(_POP_TOP, (value --)) {
+ if (!sym_is_virtual(&value)) {
+ MATERIALIZE_INST();
+ }
+ }
+
+ op(_NOP, (--)) {
+ }
+
+ op(_CHECK_STACK_SPACE_OPERAND, ( -- )) {
+ MATERIALIZE_INST();
+ }
+
+ op(_BINARY_SUBSCR_INIT_CALL, (container, sub -- new_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ }
+
+ op(_LOAD_ATTR_PROPERTY_FRAME, (fget/4, owner -- new_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ }
+
+ op(_INIT_CALL_PY_EXACT_ARGS, (callable[1], self_or_null[1], args[oparg] -- new_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+
+ int argcount = oparg;
+
+ PyCodeObject *co = NULL;
+ assert((this_instr + 2)->opcode == _PUSH_FRAME);
+ co = get_code_with_logging((this_instr + 2));
+ if (co == NULL) {
+ ctx->done = true;
+ break;
+ }
+
+
+ assert(self_or_null->sym != NULL);
+ assert(args != NULL);
+ if (sym_is_not_null(self_or_null)) {
+ // Bound method fiddling, same as _INIT_CALL_PY_EXACT_ARGS in VM
+ args--;
+ argcount++;
+ }
+
+ _Py_UopsPESlot temp;
+ if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) {
+ temp = (_Py_UopsPESlot){
+ (_Py_UopsPESymbol *)frame_new(ctx, co, 0, args, argcount), NULL
+ };
+ } else {
+ temp = (_Py_UopsPESlot){
+ (_Py_UopsPESymbol *)frame_new(ctx, co, 0, NULL, 0), NULL
+ };
+ }
+ new_frame = temp;
+ }
+
+ op(_PY_FRAME_GENERAL, (callable[1], self_or_null[1], args[oparg] -- new_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ PyCodeObject *co = NULL;
+ assert((this_instr + 2)->opcode == _PUSH_FRAME);
+ co = get_code_with_logging((this_instr + 2));
+ if (co == NULL) {
+ ctx->done = true;
+ break;
+ }
+
+ _Py_UopsPESlot temp = (_Py_UopsPESlot){(_Py_UopsPESymbol *)frame_new(ctx, co, 0, NULL, 0), NULL};
+ new_frame = temp;
+ }
+
+ op(_PY_FRAME_KW, (callable[1], self_or_null[1], args[oparg], kwnames -- new_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ }
+
+ op(_CHECK_AND_ALLOCATE_OBJECT, (type_version/2, callable[1], null[1], args[oparg] -- init[1], self[1], args[oparg])) {
+ (void)type_version;
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ self[0] = sym_new_not_null(ctx);
+ init[0] = sym_new_not_null(ctx);
+ }
+
+ op(_CREATE_INIT_FRAME, (init[1], self[1], args[oparg] -- init_frame)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ init_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ }
+
+ op(_FOR_ITER_GEN_FRAME, (iter -- iter, gen_frame)) {
+ MATERIALIZE_INST();
+ gen_frame = (_Py_UopsPESlot){NULL, NULL};
+ /* We are about to hit the end of the trace */
+ ctx->done = true;
+ }
+
+ op(_SEND_GEN_FRAME, (receiver, v -- receiver, gen_frame)) {
+ gen_frame = (_Py_UopsPESlot){NULL, NULL};
+ MATERIALIZE_INST();
+ // We are about to hit the end of the trace:
+ ctx->done = true;
+ }
+
+ op(_PUSH_FRAME, (new_frame --)) {
+ MATERIALIZE_INST();
+ SYNC_SP();
+ ctx->frame->stack_pointer = stack_pointer;
+ ctx->frame = (_Py_UOpsPEAbstractFrame *)new_frame.sym;
+ ctx->curr_frame_depth++;
+ stack_pointer = ((_Py_UOpsPEAbstractFrame *)new_frame.sym)->stack_pointer;
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // should be about to _EXIT_TRACE anyway
+ ctx->done = true;
+ break;
+ }
+
+ /* Stack space handling */
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ curr_space += framesize;
+ if (curr_space < 0 || curr_space > INT32_MAX) {
+ // won't fit in signed 32-bit int
+ ctx->done = true;
+ break;
+ }
+ max_space = curr_space > max_space ? curr_space : max_space;
+ if (first_valid_check_stack == NULL) {
+ first_valid_check_stack = corresponding_check_stack;
+ }
+ else if (corresponding_check_stack) {
+ // delete all but the first valid _CHECK_STACK_SPACE
+ corresponding_check_stack->opcode = _NOP;
+ }
+ corresponding_check_stack = NULL;
+ }
+
+ op(_RETURN_VALUE, (retval -- res)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ SYNC_SP();
+ ctx->frame->stack_pointer = stack_pointer;
+ frame_pop(ctx);
+ stack_pointer = ctx->frame->stack_pointer;
+ res = retval;
+
+ /* Stack space handling */
+ assert(corresponding_check_stack == NULL);
+ assert(co != NULL);
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ assert(framesize <= curr_space);
+ curr_space -= framesize;
+
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // might be impossible, but bailing is still safe
+ ctx->done = true;
+ }
+ }
+
+ op(_RETURN_GENERATOR, ( -- res)) {
+ MATERIALIZE_INST();
+ SYNC_SP();
+ ctx->frame->stack_pointer = stack_pointer;
+ frame_pop(ctx);
+ stack_pointer = ctx->frame->stack_pointer;
+ res = sym_new_unknown(ctx);
+
+ /* Stack space handling */
+ assert(corresponding_check_stack == NULL);
+ assert(co != NULL);
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ assert(framesize <= curr_space);
+ curr_space -= framesize;
+
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // might be impossible, but bailing is still safe
+ ctx->done = true;
+ }
+ }
+
+ op(_YIELD_VALUE, (retval -- value)) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ value = sym_new_unknown(ctx);
+ }
+
+ op(_JUMP_TO_TOP, (--)) {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ ctx->done = true;
+ }
+
+ op(_EXIT_TRACE, (exit_p/4 --)) {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ (void)exit_p;
+ ctx->done = true;
+ }
+
+ op(_UNPACK_SEQUENCE, (seq -- output[oparg])) {
+ /* This has to be done manually */
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ for (int i = 0; i < oparg; i++) {
+ output[i] = sym_new_unknown(ctx);
+ }
+ }
+
+ op(_UNPACK_EX, (seq -- left[oparg & 0xFF], unused, right[oparg >> 8])) {
+ /* This has to be done manually */
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ int totalargs = (oparg & 0xFF) + (oparg >> 8) + 1;
+ for (int i = 0; i < totalargs; i++) {
+ left[i] = sym_new_unknown(ctx);
+ }
+ (void)right;
+ }
+
+ op(_MAYBE_EXPAND_METHOD, (callable[1], self_or_null[1], args[oparg] -- func[1], maybe_self[1], args[oparg])) {
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ func[0] = sym_new_not_null(ctx);
+ maybe_self[0] = sym_new_not_null(ctx);
+ }
+
+ op(_LOAD_GLOBAL_MODULE_FROM_KEYS, (index/1, globals_keys -- res, null if (oparg & 1))) {
+ (void)index;
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ }
+
+ op(_LOAD_GLOBAL_BUILTINS_FROM_KEYS, (index/1, builtins_keys -- res, null if (oparg & 1))) {
+ (void)index;
+ MATERIALIZE_INST();
+ MATERIALIZE_INPUTS();
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ }
+// END BYTECODES //
+
+}
diff --git a/Python/partial_evaluator_cases.c.h b/Python/partial_evaluator_cases.c.h
new file mode 100644
index 00000000000000..b7aa63674504cb
--- /dev/null
+++ b/Python/partial_evaluator_cases.c.h
@@ -0,0 +1,3619 @@
+// This file is generated by Tools/cases_generator/partial_evaluator_generator.py
+// from:
+// Python/partial_evaluator_bytecodes.c
+// Do not edit!
+
+ case _NOP: {
+ break;
+ }
+
+ case _CHECK_PERIODIC: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _CHECK_PERIODIC_IF_NOT_YIELD_FROM: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ /* _QUICKEN_RESUME is not a viable micro-op for tier 2 */
+
+ /* _LOAD_BYTECODE is not a viable micro-op for tier 2 */
+
+ case _RESUME_CHECK: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ /* _MONITOR_RESUME is not a viable micro-op for tier 2 */
+
+ case _LOAD_FAST_CHECK: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = GETLOCAL(oparg);
+ // We guarantee this will error - just bail and don't optimize it.
+ if (sym_is_null(&value)) {
+ ctx->done = true;
+ }
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_FAST: {
+ _Py_UopsPESlot value;
+ value = GETLOCAL(oparg);
+ sym_set_origin_inst_override(&value, this_instr);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_FAST_AND_CLEAR: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = GETLOCAL(oparg);
+ GETLOCAL(oparg) = sym_new_null(ctx);
+ sym_set_origin_inst_override(&value, this_instr);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_CONST: {
+ _Py_UopsPESlot value;
+ // Should've all been converted by specializer.
+ Py_UNREACHABLE();
+ // Just to please the code generator that value is defined.
+ value = sym_new_const(ctx, NULL);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_CONST_IMMORTAL: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SMALL_INT: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_FAST: {
+ _Py_UopsPESlot value;
+ value = stack_pointer[-1];
+ value = stack_pointer[-1]; // NOTE(review): duplicate of the previous line — redundant cases-generator artifact, harmless
+ _PyUOpInstruction *origin = sym_get_origin(&value);
+ // Gets rid of things like x = x.
+ if (sym_is_virtual(&value) &&
+ origin != NULL &&
+ origin->opcode == _LOAD_FAST &&
+ origin->oparg == oparg) {
+ // Leave it as virtual.
+ }
+ else {
+ stack_pointer += -1; // temporarily pop so materialization sees a consistent stack depth
+ assert(WITHIN_STACK_BOUNDS());
+ materialize(&value);
+ MATERIALIZE_INST();
+ GETLOCAL(oparg) = value;
+ stack_pointer += 1; // restore; the unconditional pop below applies to both branches
+ assert(WITHIN_STACK_BOUNDS());
+ }
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _POP_TOP: {
+ _Py_UopsPESlot value;
+ value = stack_pointer[-1];
+ value = stack_pointer[-1];
+ if (!sym_is_virtual(&value)) {
+ MATERIALIZE_INST();
+ }
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _PUSH_NULL: {
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ res = sym_new_not_null(ctx);
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _END_SEND: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot receiver;
+ _Py_UopsPESlot val;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ receiver = stack_pointer[-2];
+ materialize(&receiver);
+ val = sym_new_not_null(ctx);
+ stack_pointer[-2] = val;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _UNARY_NEGATIVE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _UNARY_NOT: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _TO_BOOL: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _TO_BOOL_BOOL: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ break;
+ }
+
+ case _TO_BOOL_INT: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _TO_BOOL_LIST: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _TO_BOOL_NONE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _TO_BOOL_STR: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _REPLACE_WITH_TRUE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _UNARY_INVERT: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _GUARD_BOTH_INT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ break;
+ }
+
+ case _GUARD_NOS_INT: {
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot unused_1;
+ MATERIALIZE_INST();
+ unused_0 = stack_pointer[-1];
+ materialize(&unused_0);
+ left = stack_pointer[-2];
+ materialize(&left);
+ break;
+ }
+
+ case _GUARD_TOS_INT: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ break;
+ }
+
+ case _BINARY_OP_MULTIPLY_INT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP_ADD_INT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP_SUBTRACT_INT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_BOTH_FLOAT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ break;
+ }
+
+ case _GUARD_NOS_FLOAT: {
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot unused_1;
+ MATERIALIZE_INST();
+ unused_0 = stack_pointer[-1];
+ materialize(&unused_0);
+ left = stack_pointer[-2];
+ materialize(&left);
+ break;
+ }
+
+ case _GUARD_TOS_FLOAT: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ break;
+ }
+
+ case _BINARY_OP_MULTIPLY_FLOAT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP_ADD_FLOAT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP_SUBTRACT_FLOAT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_BOTH_UNICODE: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ break;
+ }
+
+ case _BINARY_OP_ADD_UNICODE: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP_INPLACE_ADD_UNICODE: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR: {
+ _Py_UopsPESlot sub;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ sub = stack_pointer[-1];
+ materialize(&sub);
+ container = stack_pointer[-2];
+ materialize(&container);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SLICE: {
+ _Py_UopsPESlot stop;
+ _Py_UopsPESlot start;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ stop = stack_pointer[-1];
+ materialize(&stop);
+ start = stack_pointer[-2];
+ materialize(&start);
+ container = stack_pointer[-3];
+ materialize(&container);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = res;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_SLICE: {
+ _Py_UopsPESlot stop;
+ _Py_UopsPESlot start;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ stop = stack_pointer[-1];
+ materialize(&stop);
+ start = stack_pointer[-2];
+ materialize(&start);
+ container = stack_pointer[-3];
+ materialize(&container);
+ v = stack_pointer[-4];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -4;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR_LIST_INT: {
+ _Py_UopsPESlot sub_st;
+ _Py_UopsPESlot list_st;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ sub_st = stack_pointer[-1];
+ materialize(&sub_st);
+ list_st = stack_pointer[-2];
+ materialize(&list_st);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR_STR_INT: {
+ _Py_UopsPESlot sub_st;
+ _Py_UopsPESlot str_st;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ sub_st = stack_pointer[-1];
+ materialize(&sub_st);
+ str_st = stack_pointer[-2];
+ materialize(&str_st);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR_TUPLE_INT: {
+ _Py_UopsPESlot sub_st;
+ _Py_UopsPESlot tuple_st;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ sub_st = stack_pointer[-1];
+ materialize(&sub_st);
+ tuple_st = stack_pointer[-2];
+ materialize(&tuple_st);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR_DICT: {
+ _Py_UopsPESlot sub_st;
+ _Py_UopsPESlot dict_st;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ sub_st = stack_pointer[-1];
+ materialize(&sub_st);
+ dict_st = stack_pointer[-2];
+ materialize(&dict_st);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_SUBSCR_CHECK_FUNC: {
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot unused_1;
+ MATERIALIZE_INST();
+ unused_0 = stack_pointer[-1];
+ materialize(&unused_0);
+ container = stack_pointer[-2];
+ materialize(&container);
+ break;
+ }
+
+ case _BINARY_SUBSCR_INIT_CALL: {
+ _Py_UopsPESlot sub;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot new_frame;
+ sub = stack_pointer[-1]; // fix: (container, sub -- new_frame) — sub is TOS, matching _BINARY_SUBSCR above
+ container = stack_pointer[-2]; // fix: container sits below sub (reads were swapped)
+ MATERIALIZE_INST();
+ materialize(&container);
+ materialize(&sub);
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true; // pushing a new frame ends this trace for the partial evaluator
+ stack_pointer[-2] = new_frame; // new_frame replaces container; net effect pops one slot
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LIST_APPEND: {
+ _Py_UopsPESlot v;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot list;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ v = stack_pointer[-1];
+ materialize(&v);
+ unused_0 = &stack_pointer[-1 - (oparg-1)];
+ for (int _i = oparg-1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ list = stack_pointer[-2 - (oparg-1)];
+ materialize(&list);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _SET_ADD: {
+ _Py_UopsPESlot v;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot set;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ v = stack_pointer[-1];
+ materialize(&v);
+ unused_0 = &stack_pointer[-1 - (oparg-1)];
+ for (int _i = oparg-1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ set = stack_pointer[-2 - (oparg-1)];
+ materialize(&set);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_SUBSCR: {
+ _Py_UopsPESlot sub;
+ _Py_UopsPESlot container;
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ sub = stack_pointer[-1];
+ materialize(&sub);
+ container = stack_pointer[-2];
+ materialize(&container);
+ v = stack_pointer[-3];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -3;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_SUBSCR_LIST_INT: {
+ _Py_UopsPESlot sub_st;
+ _Py_UopsPESlot list_st;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ sub_st = stack_pointer[-1];
+ materialize(&sub_st);
+ list_st = stack_pointer[-2];
+ materialize(&list_st);
+ value = stack_pointer[-3];
+ materialize(&value);
+ stack_pointer += -3;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_SUBSCR_DICT: {
+ _Py_UopsPESlot sub;
+ _Py_UopsPESlot dict_st;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ sub = stack_pointer[-1];
+ materialize(&sub);
+ dict_st = stack_pointer[-2];
+ materialize(&dict_st);
+ value = stack_pointer[-3];
+ materialize(&value);
+ materialize_ctx(ctx);
+ stack_pointer += -3;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DELETE_SUBSCR: {
+ _Py_UopsPESlot sub;
+ _Py_UopsPESlot container;
+ MATERIALIZE_INST();
+ sub = stack_pointer[-1];
+ materialize(&sub);
+ container = stack_pointer[-2];
+ materialize(&container);
+ materialize_ctx(ctx);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_INTRINSIC_1: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _CALL_INTRINSIC_2: {
+ _Py_UopsPESlot value1_st;
+ _Py_UopsPESlot value2_st;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value1_st = stack_pointer[-1];
+ materialize(&value1_st);
+ value2_st = stack_pointer[-2];
+ materialize(&value2_st);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _RETURN_VALUE: {
+ _Py_UopsPESlot retval;
+ _Py_UopsPESlot res;
+ retval = stack_pointer[-1];
+ retval = stack_pointer[-1];
+ MATERIALIZE_INST();
+ materialize(&retval);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ ctx->frame->stack_pointer = stack_pointer;
+ frame_pop(ctx);
+ stack_pointer = ctx->frame->stack_pointer;
+ res = retval;
+ /* Stack space handling */
+ assert(corresponding_check_stack == NULL);
+ assert(co != NULL);
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ assert(framesize <= curr_space);
+ curr_space -= framesize;
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // might be impossible, but bailing is still safe
+ ctx->done = true;
+ }
+ break;
+ }
+
+ case _GET_AITER: {
+ _Py_UopsPESlot obj;
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ obj = stack_pointer[-1];
+ materialize(&obj);
+ iter = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = iter;
+ break;
+ }
+
+ case _GET_ANEXT: {
+ _Py_UopsPESlot aiter;
+ _Py_UopsPESlot awaitable;
+ MATERIALIZE_INST();
+ aiter = stack_pointer[-1];
+ materialize(&aiter);
+ awaitable = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = awaitable;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GET_AWAITABLE: {
+ _Py_UopsPESlot iterable;
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iterable = stack_pointer[-1];
+ materialize(&iterable);
+ iter = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = iter;
+ break;
+ }
+
+ /* _SEND is not a viable micro-op for tier 2 */
+
+ case _SEND_GEN_FRAME: {
+ _Py_UopsPESlot v;
+ _Py_UopsPESlot receiver;
+ _Py_UopsPESlot gen_frame;
+ v = stack_pointer[-1]; // fix: (receiver, v -- receiver, gen_frame) — v is TOS (reads were swapped)
+ receiver = stack_pointer[-2]; // fix: receiver sits below v and stays on the stack
+ gen_frame = (_Py_UopsPESlot){NULL, NULL};
+ MATERIALIZE_INST();
+ // We are about to hit the end of the trace:
+ ctx->done = true;
+ stack_pointer[-1] = gen_frame; // gen_frame replaces v's slot; receiver at [-2] untouched
+ break;
+ }
+
+ case _YIELD_VALUE: {
+ _Py_UopsPESlot retval;
+ _Py_UopsPESlot value;
+ retval = stack_pointer[-1];
+ MATERIALIZE_INST();
+ materialize(&retval);
+ value = sym_new_unknown(ctx);
+ stack_pointer[-1] = value;
+ break;
+ }
+
+ case _POP_EXCEPT: {
+ _Py_UopsPESlot exc_value;
+ MATERIALIZE_INST();
+ exc_value = stack_pointer[-1];
+ materialize(&exc_value);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_COMMON_CONSTANT: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_BUILD_CLASS: {
+ _Py_UopsPESlot bc;
+ MATERIALIZE_INST();
+ bc = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = bc;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_NAME: {
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ v = stack_pointer[-1];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DELETE_NAME: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _UNPACK_SEQUENCE: {
+ _Py_UopsPESlot seq;
+ _Py_UopsPESlot *output;
+ output = &stack_pointer[-1];
+ seq = stack_pointer[-1];
+ /* This has to be done manually */
+ MATERIALIZE_INST();
+ materialize(&seq);
+ for (int i = 0; i < oparg; i++) {
+ output[i] = sym_new_unknown(ctx);
+ }
+ stack_pointer += -1 + oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _UNPACK_SEQUENCE_TWO_TUPLE: {
+ _Py_UopsPESlot seq;
+ _Py_UopsPESlot val1;
+ _Py_UopsPESlot val0;
+ MATERIALIZE_INST();
+ seq = stack_pointer[-1];
+ materialize(&seq);
+ val1 = sym_new_not_null(ctx);
+ val0 = sym_new_not_null(ctx);
+ stack_pointer[-1] = val1;
+ stack_pointer[0] = val0;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _UNPACK_SEQUENCE_TUPLE: {
+ _Py_UopsPESlot seq;
+ _Py_UopsPESlot *values;
+ MATERIALIZE_INST();
+ seq = stack_pointer[-1];
+ materialize(&seq);
+ values = &stack_pointer[-1];
+ for (int _i = oparg; --_i >= 0;) {
+ values[_i] = sym_new_not_null(ctx);
+ }
+ stack_pointer += -1 + oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _UNPACK_SEQUENCE_LIST: {
+ _Py_UopsPESlot seq;
+ _Py_UopsPESlot *values;
+ MATERIALIZE_INST();
+ seq = stack_pointer[-1];
+ materialize(&seq);
+ values = &stack_pointer[-1];
+ for (int _i = oparg; --_i >= 0;) {
+ values[_i] = sym_new_not_null(ctx);
+ }
+ stack_pointer += -1 + oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _UNPACK_EX: {
+ _Py_UopsPESlot seq;
+ _Py_UopsPESlot *left;
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot *right;
+ left = &stack_pointer[-1];
+ right = &stack_pointer[(oparg & 0xFF)];
+ seq = stack_pointer[-1];
+ /* This has to be done manually */
+ MATERIALIZE_INST();
+ materialize(&seq);
+ int totalargs = (oparg & 0xFF) + (oparg >> 8) + 1;
+ for (int i = 0; i < totalargs; i++) {
+ left[i] = sym_new_unknown(ctx);
+ }
+ (void)right;
+ stack_pointer += (oparg & 0xFF) + (oparg >> 8);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_ATTR: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ v = stack_pointer[-2];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DELETE_ATTR: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_GLOBAL: {
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ v = stack_pointer[-1];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DELETE_GLOBAL: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _LOAD_LOCALS: {
+ _Py_UopsPESlot locals;
+ MATERIALIZE_INST();
+ locals = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = locals;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */
+
+ case _LOAD_NAME: {
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ v = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = v;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_GLOBAL: {
+ _Py_UopsPESlot *res;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ res = &stack_pointer[0];
+ res[0] = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ materialize_ctx(ctx);
+ if (oparg & 1) stack_pointer[1] = null;
+ stack_pointer += 1 + (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_GLOBALS_VERSION: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _GUARD_GLOBALS_VERSION_PUSH_KEYS: {
+ _Py_UopsPESlot globals_keys;
+ MATERIALIZE_INST();
+ globals_keys = sym_new_not_null(ctx);
+ stack_pointer[0] = globals_keys;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_BUILTINS_VERSION_PUSH_KEYS: {
+ _Py_UopsPESlot builtins_keys;
+ MATERIALIZE_INST();
+ builtins_keys = sym_new_not_null(ctx);
+ stack_pointer[0] = builtins_keys;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_GLOBAL_MODULE_FROM_KEYS: {
+ _Py_UopsPESlot globals_keys;
+ _Py_UopsPESlot res;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ uint16_t index = (uint16_t)this_instr->operand0;
+ globals_keys = stack_pointer[-1];
+ (void)index;
+ MATERIALIZE_INST();
+ materialize(&globals_keys);
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = res;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_GLOBAL_BUILTINS_FROM_KEYS: {
+ _Py_UopsPESlot builtins_keys;
+ _Py_UopsPESlot res;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ uint16_t index = (uint16_t)this_instr->operand0;
+ builtins_keys = stack_pointer[-1];
+ (void)index;
+ MATERIALIZE_INST();
+ materialize(&builtins_keys);
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = res;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DELETE_FAST: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _MAKE_CELL: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _DELETE_DEREF: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _LOAD_FROM_DICT_OR_DEREF: {
+ _Py_UopsPESlot class_dict_st;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ class_dict_st = stack_pointer[-1];
+ materialize(&class_dict_st);
+ value = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = value;
+ break;
+ }
+
+ case _LOAD_DEREF: {
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_DEREF: {
+ _Py_UopsPESlot v;
+ MATERIALIZE_INST();
+ v = stack_pointer[-1];
+ materialize(&v);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COPY_FREE_VARS: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _BUILD_STRING: {
+ _Py_UopsPESlot *pieces;
+ _Py_UopsPESlot str;
+ MATERIALIZE_INST();
+ pieces = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&pieces[_i]);
+ }
+ str = sym_new_not_null(ctx);
+ stack_pointer[-oparg] = str;
+ stack_pointer += 1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BUILD_TUPLE: {
+ _Py_UopsPESlot *values;
+ _Py_UopsPESlot tup;
+ MATERIALIZE_INST();
+ values = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&values[_i]);
+ }
+ tup = sym_new_not_null(ctx);
+ stack_pointer[-oparg] = tup;
+ stack_pointer += 1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BUILD_LIST: {
+ _Py_UopsPESlot *values;
+ _Py_UopsPESlot list;
+ MATERIALIZE_INST();
+ values = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&values[_i]);
+ }
+ list = sym_new_not_null(ctx);
+ stack_pointer[-oparg] = list;
+ stack_pointer += 1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LIST_EXTEND: {
+ _Py_UopsPESlot iterable_st;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot list_st;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ iterable_st = stack_pointer[-1];
+ materialize(&iterable_st);
+ unused_0 = &stack_pointer[-1 - (oparg-1)];
+ for (int _i = oparg-1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ list_st = stack_pointer[-2 - (oparg-1)];
+ materialize(&list_st);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _SET_UPDATE: {
+ _Py_UopsPESlot iterable;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot set;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ iterable = stack_pointer[-1];
+ materialize(&iterable);
+ unused_0 = &stack_pointer[-1 - (oparg-1)];
+ for (int _i = oparg-1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ set = stack_pointer[-2 - (oparg-1)];
+ materialize(&set);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BUILD_SET: {
+ _Py_UopsPESlot *values;
+ _Py_UopsPESlot set;
+ MATERIALIZE_INST();
+ values = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&values[_i]);
+ }
+ set = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-oparg] = set;
+ stack_pointer += 1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BUILD_MAP: {
+ _Py_UopsPESlot *values;
+ _Py_UopsPESlot map;
+ MATERIALIZE_INST();
+ values = &stack_pointer[-oparg*2];
+ for (int _i = oparg*2; --_i >= 0;) {
+ materialize(&values[_i]);
+ }
+ map = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-oparg*2] = map;
+ stack_pointer += 1 - oparg*2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _SETUP_ANNOTATIONS: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _DICT_UPDATE: {
+ _Py_UopsPESlot update;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot dict;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ update = stack_pointer[-1];
+ materialize(&update);
+ unused_0 = &stack_pointer[-1 - (oparg - 1)];
+ for (int _i = oparg - 1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ dict = stack_pointer[-2 - (oparg - 1)];
+ materialize(&dict);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DICT_MERGE: {
+ _Py_UopsPESlot update;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot dict;
+ _Py_UopsPESlot unused_1;
+ _Py_UopsPESlot unused_2;
+ _Py_UopsPESlot callable;
+ _Py_UopsPESlot unused_3;
+ _Py_UopsPESlot unused_4;
+ _Py_UopsPESlot *unused_5;
+ MATERIALIZE_INST();
+ update = stack_pointer[-1];
+ materialize(&update);
+ unused_0 = &stack_pointer[-1 - (oparg - 1)];
+ for (int _i = oparg - 1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ dict = stack_pointer[-2 - (oparg - 1)];
+ materialize(&dict);
+ unused_1 = stack_pointer[-3 - (oparg - 1)];
+ materialize(&unused_1);
+ unused_2 = stack_pointer[-4 - (oparg - 1)];
+ materialize(&unused_2);
+ callable = stack_pointer[-5 - (oparg - 1)];
+ materialize(&callable);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _MAP_ADD: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot key;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot dict_st;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ key = stack_pointer[-2];
+ materialize(&key);
+ unused_0 = &stack_pointer[-2 - (oparg - 1)];
+ for (int _i = oparg - 1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ dict_st = stack_pointer[-3 - (oparg - 1)];
+ materialize(&dict_st);
+ materialize_ctx(ctx);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */
+
+ case _LOAD_SUPER_ATTR_ATTR: {
+ _Py_UopsPESlot self_st;
+ _Py_UopsPESlot class_st;
+ _Py_UopsPESlot global_super_st;
+ _Py_UopsPESlot attr_st;
+ _Py_UopsPESlot unused_0 = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ self_st = stack_pointer[-1];
+ materialize(&self_st);
+ class_st = stack_pointer[-2];
+ materialize(&class_st);
+ global_super_st = stack_pointer[-3];
+ materialize(&global_super_st);
+ attr_st = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = attr_st;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SUPER_ATTR_METHOD: {
+ _Py_UopsPESlot self_st;
+ _Py_UopsPESlot class_st;
+ _Py_UopsPESlot global_super_st;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self_or_null;
+ MATERIALIZE_INST();
+ self_st = stack_pointer[-1];
+ materialize(&self_st);
+ class_st = stack_pointer[-2];
+ materialize(&class_st);
+ global_super_st = stack_pointer[-3];
+ materialize(&global_super_st);
+ attr = sym_new_not_null(ctx);
+ self_or_null = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = attr;
+ stack_pointer[-2] = self_or_null;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_ATTR: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self_or_null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ self_or_null = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = self_or_null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_TYPE_VERSION: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _CHECK_MANAGED_OBJECT_HAS_VALUES: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_INSTANCE_VALUE: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_ATTR_MODULE: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_MODULE: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_ATTR_WITH_HINT: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_WITH_HINT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_ATTR_SLOT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_ATTR_CLASS: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_CLASS: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[-1] = attr;
+ if (oparg & 1) stack_pointer[0] = null;
+ stack_pointer += (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_ATTR_PROPERTY_FRAME: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot new_frame;
+ PyObject *fget = (PyObject *)this_instr->operand0;
+ owner = stack_pointer[-1];
+ MATERIALIZE_INST();
+ materialize(&owner);
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ stack_pointer[-1] = new_frame;
+ break;
+ }
+
+ /* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 */
+
+ case _GUARD_DORV_NO_DICT: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _STORE_ATTR_INSTANCE_VALUE: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ value = stack_pointer[-2];
+ materialize(&value);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_ATTR_WITH_HINT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ value = stack_pointer[-2];
+ materialize(&value);
+ materialize_ctx(ctx);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _STORE_ATTR_SLOT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ value = stack_pointer[-2];
+ materialize(&value);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COMPARE_OP: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COMPARE_OP_FLOAT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COMPARE_OP_INT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COMPARE_OP_STR: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _IS_OP: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ b = sym_new_not_null(ctx);
+ stack_pointer[-2] = b;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CONTAINS_OP: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ b = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = b;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CONTAINS_OP_SET: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ b = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = b;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CONTAINS_OP_DICT: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ b = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = b;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_EG_MATCH: {
+ _Py_UopsPESlot match_type_st;
+ _Py_UopsPESlot exc_value_st;
+ _Py_UopsPESlot rest;
+ _Py_UopsPESlot match;
+ MATERIALIZE_INST();
+ match_type_st = stack_pointer[-1];
+ materialize(&match_type_st);
+ exc_value_st = stack_pointer[-2];
+ materialize(&exc_value_st);
+ rest = sym_new_not_null(ctx);
+ match = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = rest;
+ stack_pointer[-1] = match;
+ break;
+ }
+
+ case _CHECK_EXC_MATCH: {
+ _Py_UopsPESlot right;
+ _Py_UopsPESlot left;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ right = stack_pointer[-1];
+ materialize(&right);
+ left = stack_pointer[-2];
+ materialize(&left);
+ b = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = b;
+ break;
+ }
+
+ case _IMPORT_NAME: {
+ _Py_UopsPESlot fromlist;
+ _Py_UopsPESlot level;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ fromlist = stack_pointer[-1];
+ materialize(&fromlist);
+ level = stack_pointer[-2];
+ materialize(&level);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _IMPORT_FROM: {
+ _Py_UopsPESlot from;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ from = stack_pointer[-1];
+ materialize(&from);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */
+
+ /* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */
+
+ case _IS_NONE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot b;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ b = sym_new_not_null(ctx);
+ stack_pointer[-1] = b;
+ break;
+ }
+
+ case _GET_LEN: {
+ _Py_UopsPESlot obj;
+ _Py_UopsPESlot len;
+ MATERIALIZE_INST();
+ obj = stack_pointer[-1];
+ materialize(&obj);
+ len = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = len;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _MATCH_CLASS: {
+ _Py_UopsPESlot names;
+ _Py_UopsPESlot type;
+ _Py_UopsPESlot subject;
+ _Py_UopsPESlot attrs;
+ MATERIALIZE_INST();
+ names = stack_pointer[-1];
+ materialize(&names);
+ type = stack_pointer[-2];
+ materialize(&type);
+ subject = stack_pointer[-3];
+ materialize(&subject);
+ attrs = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = attrs;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _MATCH_MAPPING: {
+ _Py_UopsPESlot subject;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ subject = stack_pointer[-1];
+ materialize(&subject);
+ res = sym_new_not_null(ctx);
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _MATCH_SEQUENCE: {
+ _Py_UopsPESlot subject;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ subject = stack_pointer[-1];
+ materialize(&subject);
+ res = sym_new_not_null(ctx);
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _MATCH_KEYS: {
+ _Py_UopsPESlot keys;
+ _Py_UopsPESlot subject;
+ _Py_UopsPESlot values_or_none;
+ MATERIALIZE_INST();
+ keys = stack_pointer[-1];
+ materialize(&keys);
+ subject = stack_pointer[-2];
+ materialize(&subject);
+ values_or_none = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = values_or_none;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GET_ITER: {
+ _Py_UopsPESlot iterable;
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iterable = stack_pointer[-1];
+ materialize(&iterable);
+ iter = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = iter;
+ break;
+ }
+
+ case _GET_YIELD_FROM_ITER: {
+ _Py_UopsPESlot iterable;
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iterable = stack_pointer[-1];
+ materialize(&iterable);
+ iter = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = iter;
+ break;
+ }
+
+ /* _FOR_ITER is not a viable micro-op for tier 2 */
+
+ case _FOR_ITER_TIER_TWO: {
+ _Py_UopsPESlot iter;
+ _Py_UopsPESlot next;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ next = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = next;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 */
+
+ case _ITER_CHECK_LIST: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ /* _ITER_JUMP_LIST is not a viable micro-op for tier 2 */
+
+ case _GUARD_NOT_EXHAUSTED_LIST: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ case _ITER_NEXT_LIST: {
+ _Py_UopsPESlot iter;
+ _Py_UopsPESlot next;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ next = sym_new_not_null(ctx);
+ stack_pointer[0] = next;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _ITER_CHECK_TUPLE: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ /* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 */
+
+ case _GUARD_NOT_EXHAUSTED_TUPLE: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ case _ITER_NEXT_TUPLE: {
+ _Py_UopsPESlot iter;
+ _Py_UopsPESlot next;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ next = sym_new_not_null(ctx);
+ stack_pointer[0] = next;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _ITER_CHECK_RANGE: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ /* _ITER_JUMP_RANGE is not a viable micro-op for tier 2 */
+
+ case _GUARD_NOT_EXHAUSTED_RANGE: {
+ _Py_UopsPESlot iter;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ break;
+ }
+
+ case _ITER_NEXT_RANGE: {
+ _Py_UopsPESlot iter;
+ _Py_UopsPESlot next;
+ MATERIALIZE_INST();
+ iter = stack_pointer[-1];
+ materialize(&iter);
+ next = sym_new_not_null(ctx);
+ stack_pointer[0] = next;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _FOR_ITER_GEN_FRAME: {
+ _Py_UopsPESlot iter;
+ _Py_UopsPESlot gen_frame;
+ iter = stack_pointer[-1];
+ MATERIALIZE_INST();
+ gen_frame = (_Py_UopsPESlot){NULL, NULL};
+ /* We are about to hit the end of the trace */
+ ctx->done = true;
+ stack_pointer[0] = gen_frame;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_SPECIAL: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self_or_null;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ self_or_null = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = attr;
+ stack_pointer[0] = self_or_null;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _WITH_EXCEPT_START: {
+ _Py_UopsPESlot val;
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot lasti;
+ _Py_UopsPESlot exit_self;
+ _Py_UopsPESlot exit_func;
+ _Py_UopsPESlot unused_1;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ val = stack_pointer[-1];
+ materialize(&val);
+ unused_0 = stack_pointer[-2];
+ materialize(&unused_0);
+ lasti = stack_pointer[-3];
+ materialize(&lasti);
+ exit_self = stack_pointer[-4];
+ materialize(&exit_self);
+ exit_func = stack_pointer[-5];
+ materialize(&exit_func);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _PUSH_EXC_INFO: {
+ _Py_UopsPESlot exc;
+ _Py_UopsPESlot prev_exc;
+ _Py_UopsPESlot new_exc;
+ MATERIALIZE_INST();
+ exc = stack_pointer[-1];
+ materialize(&exc);
+ prev_exc = sym_new_not_null(ctx);
+ new_exc = sym_new_not_null(ctx);
+ stack_pointer[-1] = prev_exc;
+ stack_pointer[0] = new_exc;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _GUARD_KEYS_VERSION: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_METHOD_WITH_VALUES: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ self = sym_new_not_null(ctx);
+ stack_pointer[-1] = attr;
+ stack_pointer[0] = self;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_ATTR_METHOD_NO_DICT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ self = sym_new_not_null(ctx);
+ stack_pointer[-1] = attr;
+ stack_pointer[0] = self;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot unused_0 = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ stack_pointer[-1] = attr;
+ break;
+ }
+
+ case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot unused_0 = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ stack_pointer[-1] = attr;
+ break;
+ }
+
+ case _CHECK_ATTR_METHOD_LAZY_DICT: {
+ _Py_UopsPESlot owner;
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ break;
+ }
+
+ case _LOAD_ATTR_METHOD_LAZY_DICT: {
+ _Py_UopsPESlot owner;
+ _Py_UopsPESlot attr;
+ _Py_UopsPESlot self = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ owner = stack_pointer[-1];
+ materialize(&owner);
+ attr = sym_new_not_null(ctx);
+ self = sym_new_not_null(ctx);
+ stack_pointer[-1] = attr;
+ stack_pointer[0] = self;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+        case _MAYBE_EXPAND_METHOD: {
+            _Py_UopsPESlot *args;
+            _Py_UopsPESlot *self_or_null;
+            _Py_UopsPESlot *callable;
+            _Py_UopsPESlot *func;
+            _Py_UopsPESlot *maybe_self;
+            // Outputs reuse the callable/self_or_null slots.
+            func = &stack_pointer[-2 - oparg];
+            maybe_self = &stack_pointer[-1 - oparg];
+            // Inputs, deepest first: callable, self_or_null, args[oparg]
+            // (same offsets as _CHECK_FUNCTION_VERSION and the other CALL uops).
+            args = &stack_pointer[-oparg];
+            self_or_null = &stack_pointer[-1 - oparg];
+            callable = &stack_pointer[-2 - oparg];
+            MATERIALIZE_INST();
+            materialize(&callable[0]);
+            materialize(&self_or_null[0]);
+            for (int _i = oparg; --_i >= 0;) {
+                materialize(&args[_i]);
+            }
+            func[0] = sym_new_not_null(ctx);
+            maybe_self[0] = sym_new_not_null(ctx);
+            break;
+        }
+
+ /* _DO_CALL is not a viable micro-op for tier 2 */
+
+ /* _MONITOR_CALL is not a viable micro-op for tier 2 */
+
+        case _PY_FRAME_GENERAL: {
+            _Py_UopsPESlot *args;
+            _Py_UopsPESlot *self_or_null;
+            _Py_UopsPESlot *callable;
+            _Py_UopsPESlot new_frame;
+            // Inputs, deepest first: callable, self_or_null, args[oparg]
+            // (same offsets as _CALL_NON_PY_GENERAL below).
+            args = &stack_pointer[-oparg];
+            self_or_null = &stack_pointer[-1 - oparg];
+            callable = &stack_pointer[-2 - oparg];
+            MATERIALIZE_INST();
+            materialize(&callable[0]);
+            materialize(&self_or_null[0]);
+            for (int _i = oparg; --_i >= 0;) {
+                materialize(&args[_i]);
+            }
+            PyCodeObject *co = NULL;
+            assert((this_instr + 2)->opcode == _PUSH_FRAME);
+            stack_pointer += -2 - oparg;
+            assert(WITHIN_STACK_BOUNDS());
+            co = get_code_with_logging((this_instr + 2));
+            if (co == NULL) {
+                ctx->done = true;
+                break;
+            }
+            // Callee is only known generally; push an abstract frame with no
+            // known locals.
+            _Py_UopsPESlot temp = (_Py_UopsPESlot){
+                (_Py_UopsPESymbol *)frame_new(ctx, co, 0, NULL, 0), NULL};
+            new_frame = temp;
+            stack_pointer[0] = new_frame;
+            stack_pointer += 1;
+            assert(WITHIN_STACK_BOUNDS());
+            break;
+        }
+
+ case _CHECK_FUNCTION_VERSION: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _CHECK_FUNCTION_VERSION_INLINE: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _CHECK_METHOD_VERSION: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _EXPAND_METHOD: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *method;
+ _Py_UopsPESlot *self;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ method = &stack_pointer[-2 - oparg];
+ self = &stack_pointer[-1 - oparg];
+ method[0] = sym_new_not_null(ctx);
+ self[0] = sym_new_not_null(ctx);
+ break;
+ }
+
+ case _CHECK_IS_NOT_PY_CALLABLE: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *unused_1;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_2;
+ _Py_UopsPESlot *unused_3;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ unused_1 = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&unused_1[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _CALL_NON_PY_GENERAL: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *func;
+ _Py_UopsPESlot *self;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ func = &stack_pointer[-2 - oparg];
+ self = &stack_pointer[-1 - oparg];
+ func[0] = sym_new_not_null(ctx);
+ self[0] = sym_new_not_null(ctx);
+ break;
+ }
+
+ case _CHECK_PEP_523: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _CHECK_FUNCTION_EXACT_ARGS: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _CHECK_STACK_SPACE: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+        case _INIT_CALL_PY_EXACT_ARGS: {
+            _Py_UopsPESlot *args;
+            _Py_UopsPESlot *self_or_null;
+            _Py_UopsPESlot *callable;
+            _Py_UopsPESlot new_frame;
+            // Inputs, deepest first: callable, self_or_null, args[oparg].
+            // args must point at the first argument slot so the bound-method
+            // adjustment (args--) below lands on self_or_null's slot.
+            args = &stack_pointer[-oparg];
+            self_or_null = &stack_pointer[-1 - oparg];
+            callable = &stack_pointer[-2 - oparg];
+            MATERIALIZE_INST();
+            materialize(&callable[0]);
+            materialize(&self_or_null[0]);
+            for (int _i = oparg; --_i >= 0;) {
+                materialize(&args[_i]);
+            }
+            int argcount = oparg;
+            PyCodeObject *co = NULL;
+            assert((this_instr + 2)->opcode == _PUSH_FRAME);
+            stack_pointer += -2 - oparg;
+            assert(WITHIN_STACK_BOUNDS());
+            co = get_code_with_logging((this_instr + 2));
+            if (co == NULL) {
+                ctx->done = true;
+                break;
+            }
+            assert(self_or_null->sym != NULL);
+            assert(args != NULL);
+            if (sym_is_not_null(self_or_null)) {
+                // Bound method fiddling, same as _INIT_CALL_PY_EXACT_ARGS in VM
+                args--;
+                argcount++;
+            }
+            _Py_UopsPESlot temp;
+            if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) {
+                // Nullness of self_or_null is statically known, so the
+                // argument slots can seed the new abstract frame's locals.
+                temp = (_Py_UopsPESlot){
+                    (_Py_UopsPESymbol *)frame_new(ctx, co, 0, args, argcount), NULL
+                };
+            } else {
+                // Unknown whether a self is present: fall back to a frame
+                // with no known locals.
+                temp = (_Py_UopsPESlot){
+                    (_Py_UopsPESymbol *)frame_new(ctx, co, 0, NULL, 0), NULL
+                };
+            }
+            new_frame = temp;
+            stack_pointer[0] = new_frame;
+            stack_pointer += 1;
+            assert(WITHIN_STACK_BOUNDS());
+            break;
+        }
+
+ case _PUSH_FRAME: {
+ _Py_UopsPESlot new_frame;
+ new_frame = stack_pointer[-1];
+ new_frame = stack_pointer[-1];
+ MATERIALIZE_INST();
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ ctx->frame->stack_pointer = stack_pointer;
+ ctx->frame = (_Py_UOpsPEAbstractFrame *)new_frame.sym;
+ ctx->curr_frame_depth++;
+ stack_pointer = ((_Py_UOpsPEAbstractFrame *)new_frame.sym)->stack_pointer;
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // should be about to _EXIT_TRACE anyway
+ ctx->done = true;
+ break;
+ }
+ /* Stack space handling */
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ curr_space += framesize;
+ if (curr_space < 0 || curr_space > INT32_MAX) {
+ // won't fit in signed 32-bit int
+ ctx->done = true;
+ break;
+ }
+ max_space = curr_space > max_space ? curr_space : max_space;
+ if (first_valid_check_stack == NULL) {
+ first_valid_check_stack = corresponding_check_stack;
+ }
+ else {
+ if (corresponding_check_stack) {
+ // delete all but the first valid _CHECK_STACK_SPACE
+ corresponding_check_stack->opcode = _NOP;
+ }
+ }
+ corresponding_check_stack = NULL;
+ break;
+ }
+
+ case _CALL_TYPE_1: {
+ _Py_UopsPESlot arg;
+ _Py_UopsPESlot null;
+ _Py_UopsPESlot callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ arg = stack_pointer[-1];
+ materialize(&arg);
+ null = stack_pointer[-2];
+ materialize(&null);
+ callable = stack_pointer[-3];
+ materialize(&callable);
+ res = sym_new_not_null(ctx);
+ stack_pointer[-3] = res;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_STR_1: {
+ _Py_UopsPESlot arg;
+ _Py_UopsPESlot null;
+ _Py_UopsPESlot callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ arg = stack_pointer[-1];
+ materialize(&arg);
+ null = stack_pointer[-2];
+ materialize(&null);
+ callable = stack_pointer[-3];
+ materialize(&callable);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = res;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_TUPLE_1: {
+ _Py_UopsPESlot arg;
+ _Py_UopsPESlot null;
+ _Py_UopsPESlot callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ arg = stack_pointer[-1];
+ materialize(&arg);
+ null = stack_pointer[-2];
+ materialize(&null);
+ callable = stack_pointer[-3];
+ materialize(&callable);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3] = res;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+        case _CHECK_AND_ALLOCATE_OBJECT: {
+            _Py_UopsPESlot *args;
+            _Py_UopsPESlot *null;
+            _Py_UopsPESlot *callable;
+            _Py_UopsPESlot *init;
+            _Py_UopsPESlot *self;
+            // Outputs reuse the callable/null slots: init, self, args[oparg].
+            init = &stack_pointer[-2 - oparg];
+            self = &stack_pointer[-1 - oparg];
+            uint32_t type_version = (uint32_t)this_instr->operand0;
+            // Inputs, deepest first: callable, null, args[oparg]
+            // (same offsets as the other CALL-family uops).
+            args = &stack_pointer[-oparg];
+            null = &stack_pointer[-1 - oparg];
+            callable = &stack_pointer[-2 - oparg];
+            (void)type_version;
+            MATERIALIZE_INST();
+            materialize(&callable[0]);
+            materialize(&null[0]);
+            for (int _i = oparg; --_i >= 0;) {
+                materialize(&args[_i]);
+            }
+            self[0] = sym_new_not_null(ctx);
+            init[0] = sym_new_not_null(ctx);
+            break;
+        }
+
+        case _CREATE_INIT_FRAME: {
+            _Py_UopsPESlot *args;
+            _Py_UopsPESlot *self;
+            _Py_UopsPESlot *init;
+            _Py_UopsPESlot init_frame;
+            // Inputs, deepest first: init, self, args[oparg]; the result is
+            // stored back into init's slot ([-2 - oparg]) below.
+            args = &stack_pointer[-oparg];
+            self = &stack_pointer[-1 - oparg];
+            init = &stack_pointer[-2 - oparg];
+            MATERIALIZE_INST();
+            materialize(&init[0]);
+            materialize(&self[0]);
+            for (int _i = oparg; --_i >= 0;) {
+                materialize(&args[_i]);
+            }
+            // Abstract interpretation cannot follow the __init__ frame, so
+            // end the trace here.
+            init_frame = (_Py_UopsPESlot){NULL, NULL};
+            ctx->done = true;
+            stack_pointer[-2 - oparg] = init_frame;
+            stack_pointer += -1 - oparg;
+            assert(WITHIN_STACK_BOUNDS());
+            break;
+        }
+
+ case _EXIT_INIT_CHECK: {
+ _Py_UopsPESlot should_be_none;
+ MATERIALIZE_INST();
+ should_be_none = stack_pointer[-1];
+ materialize(&should_be_none);
+ materialize_ctx(ctx);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_BUILTIN_CLASS: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_BUILTIN_O: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_BUILTIN_FAST: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_BUILTIN_FAST_WITH_KEYWORDS: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_LEN: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_ISINSTANCE: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_LIST_APPEND: {
+ _Py_UopsPESlot arg;
+ _Py_UopsPESlot self;
+ _Py_UopsPESlot callable;
+ MATERIALIZE_INST();
+ arg = stack_pointer[-1];
+ materialize(&arg);
+ self = stack_pointer[-2];
+ materialize(&self);
+ callable = stack_pointer[-3];
+ materialize(&callable);
+ stack_pointer += -3;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_METHOD_DESCRIPTOR_O: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_METHOD_DESCRIPTOR_NOARGS: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CALL_METHOD_DESCRIPTOR_FAST: {
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ args = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-1 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */
+
+ case _MAYBE_EXPAND_METHOD_KW: {
+ _Py_UopsPESlot kwnames_in;
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *func;
+ _Py_UopsPESlot *maybe_self;
+ _Py_UopsPESlot kwnames_out;
+ MATERIALIZE_INST();
+ kwnames_in = stack_pointer[-1];
+ materialize(&kwnames_in);
+ args = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ func = &stack_pointer[-3 - oparg];
+ maybe_self = &stack_pointer[-2 - oparg];
+ args = &stack_pointer[-1 - oparg];
+ func[0] = sym_new_not_null(ctx);
+ maybe_self[0] = sym_new_not_null(ctx);
+ for (int _i = oparg; --_i >= 0;) {
+ args[_i] = sym_new_not_null(ctx);
+ }
+ kwnames_out = sym_new_not_null(ctx);
+ stack_pointer[-1] = kwnames_out;
+ break;
+ }
+
+ /* _DO_CALL_KW is not a viable micro-op for tier 2 */
+
+ case _PY_FRAME_KW: {
+ _Py_UopsPESlot kwnames;
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot new_frame;
+ kwnames = stack_pointer[-3 - oparg];
+ args = &stack_pointer[-2 - oparg];
+ self_or_null = &stack_pointer[-2];
+ callable = &stack_pointer[-1];
+ MATERIALIZE_INST();
+ materialize(&callable[0]);
+ materialize(&self_or_null[0]);
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ materialize(&kwnames);
+ new_frame = (_Py_UopsPESlot){NULL, NULL};
+ ctx->done = true;
+ stack_pointer[-3 - oparg] = new_frame;
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_FUNCTION_VERSION_KW: {
+ _Py_UopsPESlot kwnames;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ kwnames = stack_pointer[-1];
+ materialize(&kwnames);
+ unused_0 = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ self_or_null = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _CHECK_METHOD_VERSION_KW: {
+ _Py_UopsPESlot kwnames;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_1;
+ MATERIALIZE_INST();
+ kwnames = stack_pointer[-1];
+ materialize(&kwnames);
+ unused_0 = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ null = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _EXPAND_METHOD_KW: {
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot *unused_1;
+ _Py_UopsPESlot *null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *method;
+ _Py_UopsPESlot *self;
+ _Py_UopsPESlot *unused_2;
+ _Py_UopsPESlot unused_3;
+ MATERIALIZE_INST();
+ unused_0 = stack_pointer[-1];
+ materialize(&unused_0);
+ unused_1 = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_1[_i]);
+ }
+ null = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&null[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ method = &stack_pointer[-3 - oparg];
+ self = &stack_pointer[-2 - oparg];
+ method[0] = sym_new_not_null(ctx);
+ self[0] = sym_new_not_null(ctx);
+ break;
+ }
+
+ case _CHECK_IS_NOT_PY_CALLABLE_KW: {
+ _Py_UopsPESlot kwnames;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot *unused_1;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot *unused_2;
+ _Py_UopsPESlot *unused_3;
+ MATERIALIZE_INST();
+ kwnames = stack_pointer[-1];
+ materialize(&kwnames);
+ unused_0 = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ unused_1 = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&unused_1[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ break;
+ }
+
+ case _CALL_KW_NON_PY: {
+ _Py_UopsPESlot kwnames;
+ _Py_UopsPESlot *args;
+ _Py_UopsPESlot *self_or_null;
+ _Py_UopsPESlot *callable;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ kwnames = stack_pointer[-1];
+ materialize(&kwnames);
+ args = &stack_pointer[-1 - oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&args[_i]);
+ }
+ self_or_null = &stack_pointer[-2 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&self_or_null[_i]);
+ }
+ callable = &stack_pointer[-3 - oparg];
+ for (int _i = 1; --_i >= 0;) {
+ materialize(&callable[_i]);
+ }
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-3 - oparg] = res;
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
+
+ case _MAKE_CALLARGS_A_TUPLE: {
+ _Py_UopsPESlot kwargs_in = (_Py_UopsPESlot){NULL, 0};
+ _Py_UopsPESlot callargs;
+ _Py_UopsPESlot unused_0;
+ _Py_UopsPESlot func;
+ _Py_UopsPESlot unused_1;
+ _Py_UopsPESlot tuple;
+ _Py_UopsPESlot kwargs_out = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ if (oparg & 1) { kwargs_in = stack_pointer[-(oparg & 1)]; }
+ materialize(&kwargs_in);
+ callargs = stack_pointer[-1 - (oparg & 1)];
+ materialize(&callargs);
+ unused_0 = stack_pointer[-2 - (oparg & 1)];
+ materialize(&unused_0);
+ func = stack_pointer[-3 - (oparg & 1)];
+ materialize(&func);
+ tuple = sym_new_not_null(ctx);
+ kwargs_out = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1 - (oparg & 1)] = tuple;
+ if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_out;
+ break;
+ }
+
+ /* _DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
+
+ case _MAKE_FUNCTION: {
+ _Py_UopsPESlot codeobj_st;
+ _Py_UopsPESlot func;
+ MATERIALIZE_INST();
+ codeobj_st = stack_pointer[-1];
+ materialize(&codeobj_st);
+ func = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = func;
+ break;
+ }
+
+ case _SET_FUNCTION_ATTRIBUTE: {
+ _Py_UopsPESlot func_in;
+ _Py_UopsPESlot attr_st;
+ _Py_UopsPESlot func_out;
+ MATERIALIZE_INST();
+ func_in = stack_pointer[-1];
+ materialize(&func_in);
+ attr_st = stack_pointer[-2];
+ materialize(&attr_st);
+ func_out = sym_new_not_null(ctx);
+ stack_pointer[-2] = func_out;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _RETURN_GENERATOR: {
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ ctx->frame->stack_pointer = stack_pointer;
+ frame_pop(ctx);
+ stack_pointer = ctx->frame->stack_pointer;
+ res = sym_new_unknown(ctx);
+ /* Stack space handling */
+ assert(corresponding_check_stack == NULL);
+ assert(co != NULL);
+ int framesize = co->co_framesize;
+ assert(framesize > 0);
+ assert(framesize <= curr_space);
+ curr_space -= framesize;
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ co = get_code(this_instr);
+ if (co == NULL) {
+ // might be impossible, but bailing is still safe
+ ctx->done = true;
+ }
+ break;
+ }
+
+ case _BUILD_SLICE: {
+ _Py_UopsPESlot step = (_Py_UopsPESlot){NULL, 0};
+ _Py_UopsPESlot stop;
+ _Py_UopsPESlot start;
+ _Py_UopsPESlot slice;
+ MATERIALIZE_INST();
+ if (oparg == 3) { step = stack_pointer[-((oparg == 3) ? 1 : 0)]; }
+ materialize(&step);
+ stop = stack_pointer[-1 - ((oparg == 3) ? 1 : 0)];
+ materialize(&stop);
+ start = stack_pointer[-2 - ((oparg == 3) ? 1 : 0)];
+ materialize(&start);
+ slice = sym_new_not_null(ctx);
+ stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice;
+ stack_pointer += -1 - ((oparg == 3) ? 1 : 0);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CONVERT_VALUE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot result;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ result = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = result;
+ break;
+ }
+
+ case _FORMAT_SIMPLE: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ value = stack_pointer[-1];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-1] = res;
+ break;
+ }
+
+ case _FORMAT_WITH_SPEC: {
+ _Py_UopsPESlot fmt_spec;
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ fmt_spec = stack_pointer[-1];
+ materialize(&fmt_spec);
+ value = stack_pointer[-2];
+ materialize(&value);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _COPY: {
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot bottom;
+ _Py_UopsPESlot *unused_1;
+ _Py_UopsPESlot top;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-(oparg-1)];
+ for (int _i = oparg-1; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ bottom = stack_pointer[-1 - (oparg-1)];
+ materialize(&bottom);
+ top = sym_new_not_null(ctx);
+ stack_pointer[0] = top;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _BINARY_OP: {
+ _Py_UopsPESlot rhs;
+ _Py_UopsPESlot lhs;
+ _Py_UopsPESlot res;
+ MATERIALIZE_INST();
+ rhs = stack_pointer[-1];
+ materialize(&rhs);
+ lhs = stack_pointer[-2];
+ materialize(&lhs);
+ res = sym_new_not_null(ctx);
+ materialize_ctx(ctx);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _SWAP: {
+ _Py_UopsPESlot top_in;
+ _Py_UopsPESlot *unused_0;
+ _Py_UopsPESlot bottom_in;
+ _Py_UopsPESlot top_out;
+ _Py_UopsPESlot *unused_1;
+ _Py_UopsPESlot bottom_out;
+ MATERIALIZE_INST();
+ top_in = stack_pointer[-1];
+ materialize(&top_in);
+ unused_0 = &stack_pointer[-1 - (oparg-2)];
+ for (int _i = oparg-2; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ bottom_in = stack_pointer[-2 - (oparg-2)];
+ materialize(&bottom_in);
+ top_out = sym_new_not_null(ctx);
+ bottom_out = sym_new_not_null(ctx);
+ stack_pointer[-2 - (oparg-2)] = top_out;
+ stack_pointer[-1] = bottom_out;
+ break;
+ }
+
+ /* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */
+
+ /* _MONITOR_JUMP_BACKWARD is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_POP_JUMP_IF_NONE is not a viable micro-op for tier 2 */
+
+ /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */
+
+ case _GUARD_IS_TRUE_POP: {
+ _Py_UopsPESlot flag;
+ MATERIALIZE_INST();
+ flag = stack_pointer[-1];
+ materialize(&flag);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_IS_FALSE_POP: {
+ _Py_UopsPESlot flag;
+ MATERIALIZE_INST();
+ flag = stack_pointer[-1];
+ materialize(&flag);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_IS_NONE_POP: {
+ _Py_UopsPESlot val;
+ MATERIALIZE_INST();
+ val = stack_pointer[-1];
+ materialize(&val);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _GUARD_IS_NOT_NONE_POP: {
+ _Py_UopsPESlot val;
+ MATERIALIZE_INST();
+ val = stack_pointer[-1];
+ materialize(&val);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _JUMP_TO_TOP: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ ctx->done = true;
+ break;
+ }
+
+ case _SET_IP: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _CHECK_STACK_SPACE_OPERAND: {
+ uint32_t framesize = (uint32_t)this_instr->operand0;
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _SAVE_RETURN_OFFSET: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _EXIT_TRACE: {
+ PyObject *exit_p = (PyObject *)this_instr->operand0;
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ (void)exit_p;
+ ctx->done = true;
+ break;
+ }
+
+ case _CHECK_VALIDITY: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _LOAD_CONST_INLINE: {
+ _Py_UopsPESlot value;
+ PyObject *ptr = (PyObject *)this_instr->operand0;
+ MATERIALIZE_INST();
+ value = sym_new_const(ctx, ptr);
+ sym_set_origin_inst_override(&value, this_instr);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_CONST_INLINE_BORROW: {
+ _Py_UopsPESlot value;
+ PyObject *ptr = (PyObject *)this_instr->operand0;
+ MATERIALIZE_INST();
+ value = sym_new_const(ctx, ptr);
+ sym_set_origin_inst_override(&value, this_instr);
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _POP_TOP_LOAD_CONST_INLINE_BORROW: {
+ _Py_UopsPESlot pop;
+ _Py_UopsPESlot value;
+ MATERIALIZE_INST();
+ pop = stack_pointer[-1];
+ materialize(&pop);
+ value = sym_new_not_null(ctx);
+ stack_pointer[-1] = value;
+ break;
+ }
+
+ case _LOAD_CONST_INLINE_WITH_NULL: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot null;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[0] = value;
+ stack_pointer[1] = null;
+ stack_pointer += 2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_CONST_INLINE_BORROW_WITH_NULL: {
+ _Py_UopsPESlot value;
+ _Py_UopsPESlot null;
+ MATERIALIZE_INST();
+ value = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[0] = value;
+ stack_pointer[1] = null;
+ stack_pointer += 2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _CHECK_FUNCTION: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _LOAD_GLOBAL_MODULE: {
+ _Py_UopsPESlot res;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[0] = res;
+ if (oparg & 1) stack_pointer[1] = null;
+ stack_pointer += 1 + (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _LOAD_GLOBAL_BUILTINS: {
+ _Py_UopsPESlot res;
+ _Py_UopsPESlot null = (_Py_UopsPESlot){NULL, 0};
+ MATERIALIZE_INST();
+ res = sym_new_not_null(ctx);
+ null = sym_new_null(ctx);
+ stack_pointer[0] = res;
+ if (oparg & 1) stack_pointer[1] = null;
+ stack_pointer += 1 + (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _INTERNAL_INCREMENT_OPT_COUNTER: {
+ _Py_UopsPESlot opt;
+ MATERIALIZE_INST();
+ opt = stack_pointer[-1];
+ materialize(&opt);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _DYNAMIC_EXIT: {
+ MATERIALIZE_INST();
+ materialize_ctx(ctx);
+ break;
+ }
+
+ case _START_EXECUTOR: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _MAKE_WARM: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _FATAL_ERROR: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _CHECK_VALIDITY_AND_SET_IP: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _DEOPT: {
+ MATERIALIZE_INST();
+ break;
+ }
+
+ case _ERROR_POP_N: {
+ _Py_UopsPESlot *unused_0;
+ MATERIALIZE_INST();
+ unused_0 = &stack_pointer[-oparg];
+ for (int _i = oparg; --_i >= 0;) {
+ materialize(&unused_0[_i]);
+ }
+ materialize_ctx(ctx);
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
+ case _TIER2_RESUME_CHECK: {
+ MATERIALIZE_INST();
+ break;
+ }
+
diff --git a/Python/partial_evaluator_symbols.c b/Python/partial_evaluator_symbols.c
new file mode 100644
index 00000000000000..96f950fcc2003a
--- /dev/null
+++ b/Python/partial_evaluator_symbols.c
@@ -0,0 +1,413 @@
+#ifdef _Py_TIER2
+
+#include "Python.h"
+
+#include "pycore_code.h"
+#include "pycore_frame.h"
+#include "pycore_long.h"
+#include "pycore_optimizer.h"
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stddef.h>
+
+/* Symbols
+ =======
+
+ Documentation TODO gh-120619
+ */
+
+// Flags for below.
+#define IS_NULL 1 << 0
+#define NOT_NULL 1 << 1
+#define NO_SPACE 1 << 2
+
+#ifdef Py_DEBUG
+static inline int get_lltrace(void) {
+ char *uop_debug = Py_GETENV("PYTHON_OPT_DEBUG");
+ int lltrace = 0;
+ if (uop_debug != NULL && *uop_debug >= '0') {
+ lltrace = *uop_debug - '0'; // TODO: Parse an int and all that
+ }
+ return lltrace;
+}
+#define DPRINTF(level, ...) \
+ if (get_lltrace() >= (level)) { printf(__VA_ARGS__); }
+#else
+#define DPRINTF(level, ...)
+#endif
+
+static _Py_UopsPESymbol NO_SPACE_SYMBOL = {
+ .flags = IS_NULL | NOT_NULL | NO_SPACE,
+ .const_val = NULL,
+};
+
+static _Py_UopsPESlot
+out_of_space(_Py_UOpsPEContext *ctx)
+{
+ ctx->done = true;
+ ctx->out_of_space = true;
+ return (_Py_UopsPESlot){&NO_SPACE_SYMBOL, NULL};
+}
+
+static _Py_UopsPESlot
+sym_new(_Py_UOpsPEContext *ctx)
+{ // Allocate a fresh Top symbol (flags == 0, no constant) from the context's arena.
+    _Py_UopsPESymbol *self = &ctx->sym_arena.arena[ctx->sym_arena.sym_curr_number]; // address taken before the bounds check, but never dereferenced when out of range
+    if (ctx->sym_arena.sym_curr_number >= ctx->sym_arena.sym_max_number) {
+        OPT_STAT_INC(optimizer_failure_reason_no_memory);
+        DPRINTF(1, "out of space for symbolic expression type\n");
+        return (_Py_UopsPESlot){NULL, NULL}; // .sym == NULL signals arena exhaustion to callers
+    }
+    ctx->sym_arena.sym_curr_number++;
+    self->flags = 0;
+    self->const_val = NULL;
+
+    return (_Py_UopsPESlot){self, NULL}; // second field (origin_inst) starts NULL: no producing instruction yet
+}
+
+static inline void
+sym_set_flag(_Py_UopsPESlot *sym, int flag)
+{
+ sym->sym->flags |= flag;
+}
+
+static inline void
+sym_set_bottom(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym)
+{
+ sym_set_flag(sym, IS_NULL | NOT_NULL);
+ Py_CLEAR(sym->sym->const_val);
+ ctx->done = true;
+ ctx->contradiction = true;
+}
+
+bool
+_Py_uop_pe_sym_is_bottom(_Py_UopsPESlot *sym)
+{
+ if ((sym->sym->flags & IS_NULL) && (sym->sym->flags & NOT_NULL)) {
+ assert(sym->sym->flags == (IS_NULL | NOT_NULL));
+ assert(sym->sym->const_val == NULL);
+ return true;
+ }
+ return false;
+}
+
+bool
+_Py_uop_pe_sym_is_not_null(_Py_UopsPESlot *sym)
+{
+ return sym->sym->flags == NOT_NULL;
+}
+
+bool
+_Py_uop_pe_sym_is_null(_Py_UopsPESlot *sym)
+{
+ return sym->sym->flags == IS_NULL;
+}
+
+bool
+_Py_uop_pe_sym_is_const(_Py_UopsPESlot *sym)
+{
+ return sym->sym->const_val != NULL;
+}
+
+PyObject *
+_Py_uop_pe_sym_get_const(_Py_UopsPESlot *sym)
+{
+ return sym->sym->const_val;
+}
+
+void
+_Py_uop_pe_sym_set_const(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym, PyObject *const_val)
+{ // Refine *sym to the constant const_val; contradictory refinements collapse it to bottom.
+    assert(const_val != NULL);
+    if (sym->sym->flags & IS_NULL) {
+        sym_set_bottom(ctx, sym); // a known-NULL symbol cannot also hold a constant
+    }
+    if (sym->sym->const_val != NULL) {
+        if (sym->sym->const_val != const_val) {
+            // TODO: What if they're equal?
+            sym_set_bottom(ctx, sym); // two different constant objects: contradiction
+        }
+    }
+    else {
+        sym_set_flag(sym, NOT_NULL);
+        sym->sym->const_val = Py_NewRef(const_val); // the symbol owns a strong reference (released in abstractcontext_fini)
+    }
+}
+
+void
+_Py_uop_pe_sym_set_null(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym)
+{
+ if (_Py_uop_pe_sym_is_not_null(sym)) {
+ sym_set_bottom(ctx, sym);
+ }
+ sym_set_flag(sym, IS_NULL);
+}
+
+void
+_Py_uop_pe_sym_set_non_null(_Py_UOpsPEContext *ctx, _Py_UopsPESlot *sym)
+{
+ if (_Py_uop_pe_sym_is_null(sym)) {
+ sym_set_bottom(ctx, sym);
+ }
+ sym_set_flag(sym, NOT_NULL);
+}
+
+
+_Py_UopsPESlot
+_Py_uop_pe_sym_new_unknown(_Py_UOpsPEContext *ctx)
+{
+ return sym_new(ctx);
+}
+
+_Py_UopsPESlot
+_Py_uop_pe_sym_new_not_null(_Py_UOpsPEContext *ctx)
+{
+ _Py_UopsPESlot res = _Py_uop_pe_sym_new_unknown(ctx);
+ if (res.sym == NULL) {
+ return out_of_space(ctx);
+ }
+ sym_set_flag(&res, NOT_NULL);
+ return res;
+}
+
+// Adds a new reference to const_val, owned by the symbol.
+_Py_UopsPESlot
+_Py_uop_pe_sym_new_const(_Py_UOpsPEContext *ctx, PyObject *const_val)
+{
+ assert(const_val != NULL);
+ _Py_UopsPESlot res = sym_new(ctx);
+ if (res.sym == NULL) {
+ return out_of_space(ctx);
+ }
+ _Py_uop_pe_sym_set_const(ctx, &res, const_val);
+ return res;
+}
+
+_Py_UopsPESlot
+_Py_uop_pe_sym_new_null(_Py_UOpsPEContext *ctx)
+{
+ _Py_UopsPESlot null_sym = _Py_uop_pe_sym_new_unknown(ctx);
+ if (null_sym.sym == NULL) {
+ return out_of_space(ctx);
+ }
+ _Py_uop_pe_sym_set_null(ctx, &null_sym);
+ return null_sym;
+}
+
+int
+_Py_uop_pe_sym_truthiness(_Py_UopsPESlot *sym)
+{
+ /* There are some non-constant values for
+ * which `bool(val)` always evaluates to
+ * True or False, such as tuples with known
+ * length, but unknown contents, or bound-methods.
+ * This function will need updating
+ * should we support those values.
+ */
+ if (_Py_uop_pe_sym_is_bottom(sym)) {
+ return -1;
+ }
+ if (!_Py_uop_pe_sym_is_const(sym)) {
+ return -1;
+ }
+ PyObject *value = _Py_uop_pe_sym_get_const(sym);
+ if (value == Py_None) {
+ return 0;
+ }
+ /* Only handle a few known safe types */
+ PyTypeObject *tp = Py_TYPE(value);
+ if (tp == &PyLong_Type) {
+ return !_PyLong_IsZero((PyLongObject *)value);
+ }
+ if (tp == &PyUnicode_Type) {
+ return value != &_Py_STR(empty);
+ }
+ if (tp == &PyBool_Type) {
+ return value == Py_True;
+ }
+ return -1;
+}
+
+void
+_Py_uop_sym_set_origin_inst_override(_Py_UopsPESlot *sym, _PyUOpInstruction *origin)
+{
+ sym->origin_inst = origin;
+}
+
+_PyUOpInstruction *
+_Py_uop_sym_get_origin(_Py_UopsPESlot *sym)
+{
+ return sym->origin_inst;
+}
+
+bool
+_Py_uop_sym_is_virtual(_Py_UopsPESlot *sym)
+{
+ if (!sym->origin_inst) {
+ return false;
+ }
+ else {
+ return sym->origin_inst->is_virtual;
+ }
+}
+
+// Returns the new abstract frame on success, NULL on error (out of slot space).
+_Py_UOpsPEAbstractFrame *
+_Py_uop_pe_frame_new(
+ _Py_UOpsPEContext *ctx,
+ PyCodeObject *co,
+ int curr_stackentries,
+ _Py_UopsPESlot *args,
+ int arg_len)
+{
+ assert(ctx->curr_frame_depth < MAX_ABSTRACT_FRAME_DEPTH);
+ _Py_UOpsPEAbstractFrame *frame = &ctx->frames[ctx->curr_frame_depth];
+
+ frame->stack_len = co->co_stacksize;
+ frame->locals_len = co->co_nlocalsplus;
+
+ frame->locals = ctx->n_consumed;
+ frame->stack = frame->locals + co->co_nlocalsplus;
+ frame->stack_pointer = frame->stack + curr_stackentries;
+ ctx->n_consumed = ctx->n_consumed + (co->co_nlocalsplus + co->co_stacksize);
+ if (ctx->n_consumed >= ctx->limit) {
+ ctx->done = true;
+ ctx->out_of_space = true;
+ return NULL;
+ }
+
+ // Initialize with the initial state of all local variables
+ for (int i = 0; i < arg_len; i++) {
+ frame->locals[i] = args[i];
+ }
+
+ for (int i = arg_len; i < co->co_nlocalsplus; i++) {
+ _Py_UopsPESlot local = _Py_uop_pe_sym_new_unknown(ctx);
+ frame->locals[i] = local;
+ }
+
+
+ // Initialize the stack as well
+ for (int i = 0; i < curr_stackentries; i++) {
+ _Py_UopsPESlot stackvar = _Py_uop_pe_sym_new_unknown(ctx);
+ frame->stack[i] = stackvar;
+ }
+
+ return frame;
+}
+
+void
+_Py_uop_pe_abstractcontext_fini(_Py_UOpsPEContext *ctx)
+{
+ if (ctx == NULL) {
+ return;
+ }
+ ctx->curr_frame_depth = 0;
+ int tys = ctx->sym_arena.sym_curr_number;
+ for (int i = 0; i < tys; i++) {
+ Py_CLEAR(ctx->sym_arena.arena[i].const_val);
+ }
+}
+
+void
+_Py_uop_pe_abstractcontext_init(_Py_UOpsPEContext *ctx)
+{
+ ctx->limit = ctx->locals_and_stack + MAX_ABSTRACT_INTERP_SIZE;
+ ctx->n_consumed = ctx->locals_and_stack;
+#ifdef Py_DEBUG // Aids debugging a little. There should never be NULL in the abstract interpreter.
+ for (int i = 0 ; i < MAX_ABSTRACT_INTERP_SIZE; i++) {
+ ctx->locals_and_stack[i] = (_Py_UopsPESlot){NULL, NULL};
+ }
+#endif
+
+ // Setup the arena for sym expressions.
+ ctx->sym_arena.sym_curr_number = 0;
+ ctx->sym_arena.sym_max_number = TY_ARENA_SIZE;
+
+ // Frame setup
+ ctx->curr_frame_depth = 0;
+}
+
+int
+_Py_uop_pe_frame_pop(_Py_UOpsPEContext *ctx)
+{ // Pop the current abstract frame; always returns 0.
+    _Py_UOpsPEAbstractFrame *frame = ctx->frame;
+    ctx->n_consumed = frame->locals; // slots are handed out contiguously, so rewinding to this frame's locals releases them
+    ctx->curr_frame_depth--;
+    assert(ctx->curr_frame_depth >= 1); // the root frame must never be popped
+    ctx->frame = &ctx->frames[ctx->curr_frame_depth - 1];
+
+    return 0;
+}
+
+
+#define TEST_PREDICATE(PRED, MSG) \
+do { \
+ if (!(PRED)) { \
+ PyErr_SetString( \
+ PyExc_AssertionError, \
+ (MSG)); \
+ goto fail; \
+ } \
+} while (0)
+
+static _Py_UopsPESlot
+make_bottom(_Py_UOpsPEContext *ctx)
+{
+ _Py_UopsPESlot sym = _Py_uop_pe_sym_new_unknown(ctx);
+ _Py_uop_pe_sym_set_null(ctx, &sym);
+ _Py_uop_pe_sym_set_non_null(ctx, &sym);
+ return sym;
+}
+
+PyObject *
+_Py_uop_pe_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
+{ // Self-test for the PE symbol lattice; returns None on success, NULL with AssertionError set on failure.
+    _Py_UOpsPEContext context;
+    _Py_UOpsPEContext *ctx = &context;
+    _Py_uop_pe_abstractcontext_init(ctx);
+    PyObject *val_42 = NULL;
+    PyObject *val_43 = NULL;
+
+    // Use a single 'sym' variable so copy-pasting tests is easier.
+    _Py_UopsPESlot sym = _Py_uop_pe_sym_new_unknown(ctx);
+    if (sym.sym == NULL) {
+        goto fail;
+    }
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_null(&sym), "top is NULL");
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_not_null(&sym), "top is not NULL");
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_const(&sym), "top is a constant");
+    TEST_PREDICATE(_Py_uop_pe_sym_get_const(&sym) == NULL, "top as constant is not NULL");
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_bottom(&sym), "top is bottom");
+
+    sym = make_bottom(ctx);
+    if (sym.sym == NULL) {
+        goto fail;
+    }
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_null(&sym), "bottom is NULL is not false");
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_not_null(&sym), "bottom is not NULL is not false");
+    TEST_PREDICATE(!_Py_uop_pe_sym_is_const(&sym), "bottom is a constant is not false");
+    TEST_PREDICATE(_Py_uop_pe_sym_get_const(&sym) == NULL, "bottom as constant is not NULL");
+    TEST_PREDICATE(_Py_uop_pe_sym_is_bottom(&sym), "bottom isn't bottom");
+
+    sym = _Py_uop_pe_sym_new_const(ctx, Py_None);
+    TEST_PREDICATE(_Py_uop_pe_sym_truthiness(&sym) == 0, "bool(None) is not False");
+    sym = _Py_uop_pe_sym_new_const(ctx, Py_False);
+    TEST_PREDICATE(_Py_uop_pe_sym_truthiness(&sym) == 0, "bool(False) is not False");
+    sym = _Py_uop_pe_sym_new_const(ctx, PyLong_FromLong(0));
+    TEST_PREDICATE(_Py_uop_pe_sym_truthiness(&sym) == 0, "bool(0) is not False");
+
+    _Py_uop_pe_abstractcontext_fini(ctx);
+    Py_XDECREF(val_42); // never assigned in this test; Py_DECREF on NULL would crash
+    Py_XDECREF(val_43);
+    Py_RETURN_NONE;
+
+fail:
+    _Py_uop_pe_abstractcontext_fini(ctx);
+    Py_XDECREF(val_42);
+    Py_XDECREF(val_43);
+    return NULL;
+}
+
+#endif /* _Py_TIER2 */
diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py
index a08b32fa45db3e..1d3e3dd9844ad7 100644
--- a/Tools/c-analyzer/cpython/_parser.py
+++ b/Tools/c-analyzer/cpython/_parser.py
@@ -82,10 +82,12 @@ def clean_lines(text):
Python/generated_cases.c.h
Python/executor_cases.c.h
Python/optimizer_cases.c.h
+Python/partial_evaluator_cases.c.h
# not actually source
Python/bytecodes.c
Python/optimizer_bytecodes.c
+Python/partial_evaluator_bytecodes.c
# mimalloc
Objects/mimalloc/*.c
diff --git a/Tools/cases_generator/README.md b/Tools/cases_generator/README.md
index fb512c4646b851..02dd9c46f4a767 100644
--- a/Tools/cases_generator/README.md
+++ b/Tools/cases_generator/README.md
@@ -16,6 +16,9 @@ What's currently here:
- `optimizer_generator.py`: reads `Python/bytecodes.c` and
`Python/optimizer_bytecodes.c` and writes
`Python/optimizer_cases.c.h`
+- `partial_evaluator_generator.py`: reads `Python/bytecodes.c` and
+ `Python/partial_evaluator_bytecodes.c` and writes
+ `Python/partial_evaluator_cases.c.h`
- `stack.py`: code to handle generalized stack effects
- `cwriter.py`: code which understands tokens and how to format C code;
main class: `CWriter`
diff --git a/Tools/cases_generator/partial_evaluator_generator.py b/Tools/cases_generator/partial_evaluator_generator.py
new file mode 100644
index 00000000000000..3840b26c5bca70
--- /dev/null
+++ b/Tools/cases_generator/partial_evaluator_generator.py
@@ -0,0 +1,303 @@
+"""Generate the cases for the tier 2 partial evaluator.
+Reads the instruction definitions from bytecodes.c and partial_evaluator_bytecodes.c.
+Writes the cases to partial_evaluator_cases.c.h, which is #included in Python/partial_evaluator.c.
+"""
+
+import argparse
+
+from analyzer import (
+ Analysis,
+ Instruction,
+ Uop,
+ analyze_files,
+ StackItem,
+ analysis_error,
+)
+from generators_common import (
+ DEFAULT_INPUT,
+ ROOT,
+ write_header,
+ Emitter,
+ TokenIterator,
+)
+from cwriter import CWriter
+from typing import TextIO, Iterator
+from lexer import Token
+from stack import Local, Stack, StackError, Storage
+
+DEFAULT_OUTPUT = ROOT / "Python/partial_evaluator_cases.c.h"
+DEFAULT_ABSTRACT_INPUT = (ROOT / "Python/partial_evaluator_bytecodes.c").absolute().as_posix()
+
+def validate_uop(override: Uop, uop: Uop) -> None:
+ if len(override.stack.inputs) != len(uop.stack.inputs):
+        assert False, f"Uop {uop.name} input lengths don't match."
+ if len(override.stack.outputs) != len(uop.stack.outputs):
+        assert False, f"Uop {uop.name} output lengths don't match."
+ for override_input, uop_input in zip(override.stack.inputs, uop.stack.inputs):
+ if override_input.name != uop_input.name:
+ assert False, f"Uop {uop.name} input names don't match."
+ if override_input.size != uop_input.size:
+ assert False, f"Uop {uop.name} input sizes don't match."
+ for override_output, uop_output in zip(override.stack.outputs, uop.stack.outputs):
+ if override_output.name != uop_output.name:
+ assert False, f"Uop {uop.name} output names don't match."
+ if override_output.size != uop_output.size:
+ assert False, f"Uop {uop.name} output sizes don't match."
+
+
+
+def type_name(var: StackItem) -> str:
+ if var.is_array():
+ return f"_Py_UopsPESlot *"
+ return f"_Py_UopsPESlot "
+
+def var_name(var: StackItem, unused_count: int) -> tuple[str, int]:
+ if var.name == "unused":
+ name = f"unused_{unused_count}"
+ unused_count += 1
+ else:
+ name = var.name
+ return name, unused_count
+
+
+def declare_variables(uop: Uop, out: CWriter) -> None:
+ variables = set()
+ unused_count = 0
+ for var in reversed(uop.stack.inputs):
+ if var.name not in variables:
+ name, unused_count = var_name(var, unused_count)
+ variables.add(name)
+ if var.condition:
+ out.emit(f"{type_name(var)}{name} = (_Py_UopsPESlot){{NULL, 0}};\n")
+ else:
+ out.emit(f"{type_name(var)}{name};\n")
+ for var in uop.stack.outputs:
+ if var.name not in variables:
+ name, unused_count = var_name(var, unused_count)
+ variables.add(name)
+ if var.condition:
+ out.emit(f"{type_name(var)}{name} = (_Py_UopsPESlot){{NULL, 0}};\n")
+ else:
+ out.emit(f"{type_name(var)}{name};\n")
+
+
+
+
+def emit_default(out: CWriter, uop: Uop, stack: Stack) -> None:
+ out.emit("MATERIALIZE_INST();\n")
+ unused_count = 0
+ for var in reversed(uop.stack.inputs):
+ name, unused_count = var_name(var, unused_count)
+ old_var_name = var.name
+ var.name = name
+ assign, _ = stack.pop(var, assign_unused=True)
+ var.name = old_var_name
+ out.emit(assign)
+ if var.is_array():
+ out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n")
+ out.emit(f"materialize(&{name}[_i]);\n")
+ out.emit("}\n")
+ else:
+ out.emit(f"materialize(&{name});\n")
+ top_offset = stack.top_offset.copy()
+ for var in uop.stack.outputs:
+ if var.is_array() and not var.peek and not var.name == "unused":
+ c_offset = top_offset.to_c()
+ out.emit(f"{var.name} = &stack_pointer[{c_offset}];\n")
+ top_offset.push(var)
+ for var in uop.stack.outputs:
+ local = Local.undefined(var)
+ stack.push(local)
+ if var.name != "unused" and not var.peek:
+ local.defined = True
+ if var.is_array():
+ if var.size == "1":
+ out.emit(f"{var.name}[0] = sym_new_not_null(ctx);\n")
+ else:
+ out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n")
+ out.emit(f"{var.name}[_i] = sym_new_not_null(ctx);\n")
+ out.emit("}\n")
+ elif var.name == "null":
+ out.emit(f"{var.name} = sym_new_null(ctx);\n")
+ else:
+ out.emit(f"{var.name} = sym_new_not_null(ctx);\n")
+ if uop.properties.escapes:
+ out.emit("materialize_ctx(ctx);\n")
+
+
+class Tier2PEEmitter(Emitter):
+ def __init__(self, out: CWriter):
+ super().__init__(out)
+ self._replacers["MATERIALIZE_INPUTS"] = self.materialize_inputs
+
+ def materialize_inputs(
+ self,
+ tkn: Token,
+ tkn_iter: TokenIterator,
+ uop: Uop,
+ storage: Storage,
+ inst: Instruction | None,
+ ) -> bool:
+ next(tkn_iter)
+ next(tkn_iter)
+ next(tkn_iter)
+ self.out.emit_at("", tkn)
+ for var in uop.stack.inputs:
+ if var.size:
+ if var.size == "1":
+ self.out.emit(f"materialize(&{var.name}[0]);\n")
+ else:
+ self.out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n")
+ self.out.emit(f"materialize(&{var.name}[_i]);\n")
+ self.out.emit("}\n")
+ elif var.condition:
+ if var.condition == "1":
+ self.out.emit(f"materialize(&{var.name});\n")
+ elif var.condition != "0":
+ self.out.emit(f"materialize(&{var.name});\n")
+ else:
+ self.out.emit(f"materialize(&{var.name});\n")
+ return False
+
+ def emit_save(self, storage: Storage) -> None:
+ storage.flush(self.out)
+
+ def emit_reload(self, storage: Storage) -> None:
+ pass
+
+def write_uop(
+ override: Uop | None,
+ uop: Uop,
+ out: CWriter,
+ stack: Stack,
+ debug: bool,
+) -> None:
+ prototype = override if override else uop
+ try:
+ out.start_line()
+ if override:
+ code_list, storage = Storage.for_uop(stack, prototype, extract_bits=False)
+ for code in code_list:
+ out.emit(code)
+ if debug:
+ args = []
+ for input in prototype.stack.inputs:
+ if not input.peek or override:
+ args.append(input.name)
+ out.emit(f'DEBUG_PRINTF({", ".join(args)});\n')
+ if override:
+ for cache in uop.caches:
+ if cache.name != "unused":
+ if cache.size == 4:
+ type = cast = "PyObject *"
+ else:
+ type = f"uint{cache.size*16}_t "
+ cast = f"uint{cache.size*16}_t"
+ out.emit(f"{type}{cache.name} = ({cast})this_instr->operand0;\n")
+ if override:
+ emitter = Tier2PEEmitter(out)
+ # No reference management of inputs needed.
+ for var in storage.inputs: # type: ignore[possibly-undefined]
+ var.defined = False
+ base_offset = stack.base_offset.copy()
+ for input in reversed(uop.stack.inputs):
+ c_offset = base_offset.to_c()
+ if input.is_array():
+ out.emit(f"{input.name} = &stack_pointer[{c_offset}];\n")
+ else:
+ out.emit(f"{input.name} = stack_pointer[{c_offset}];\n")
+ base_offset.push(input)
+ storage = emitter.emit_tokens(override, storage, None)
+ out.start_line()
+ storage.flush(out, cast_type="", extract_bits=False)
+ else:
+ emit_default(out, uop, stack)
+ out.start_line()
+ stack.flush(out, cast_type="", extract_bits=False)
+ except StackError as ex:
+ raise analysis_error(ex.args[0], prototype.body[0]) # from None
+
+
+SKIPS = ("_EXTENDED_ARG",)
+
+
+def generate_abstract_interpreter(
+ filenames: list[str],
+ abstract: Analysis,
+ base: Analysis,
+ outfile: TextIO,
+ debug: bool,
+) -> None:
+ write_header(__file__, filenames, outfile)
+ out = CWriter(outfile, 2, False)
+ out.emit("\n")
+ base_uop_names = set([uop.name for uop in base.uops.values()])
+ for abstract_uop_name in abstract.uops:
+ assert (
+ abstract_uop_name in base_uop_names
+        ), f"All abstract uops should override base uops, but {abstract_uop_name} does not."
+
+ for uop in base.uops.values():
+ override: Uop | None = None
+ if uop.name in abstract.uops:
+ override = abstract.uops[uop.name]
+ validate_uop(override, uop)
+ if uop.properties.tier == 1:
+ continue
+ if uop.replicates:
+ continue
+ if uop.is_super():
+ continue
+ if not uop.is_viable():
+ out.emit(f"/* {uop.name} is not a viable micro-op for tier 2 */\n\n")
+ continue
+ out.emit(f"case {uop.name}: {{\n")
+ declare_variables(uop, out)
+ stack = Stack()
+ write_uop(override, uop, out, stack, debug)
+ out.start_line()
+ out.emit("break;\n")
+ out.emit("}")
+ out.emit("\n\n")
+
+
+def generate_tier2_abstract_from_files(
+ filenames: list[str], outfilename: str, debug: bool = False
+) -> None:
+ assert len(filenames) == 2, "Need a base file and an abstract cases file."
+ base = analyze_files([filenames[0]])
+ abstract = analyze_files([filenames[1]])
+ with open(outfilename, "w") as outfile:
+ generate_abstract_interpreter(filenames, abstract, base, outfile, debug)
+
+
+arg_parser = argparse.ArgumentParser(
+    description="Generate the code for the tier 2 partial evaluator.",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+)
+
+arg_parser.add_argument(
+ "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
+)
+
+
+arg_parser.add_argument("input", nargs="*", help="Abstract interpreter definition file")
+
+arg_parser.add_argument(
+ "base", nargs="*", help="The base instruction definition file(s)"
+)
+
+arg_parser.add_argument("-d", "--debug", help="Insert debug calls", action="store_true")
+
+if __name__ == "__main__":
+ args = arg_parser.parse_args()
+ if not args.input:
+ args.base.append(DEFAULT_INPUT)
+ args.input.append(DEFAULT_ABSTRACT_INPUT)
+ else:
+ args.base.append(args.input[-1])
+ args.input.pop()
+ abstract = analyze_files(args.input)
+ base = analyze_files(args.base)
+ with open(args.output, "w") as outfile:
+ generate_abstract_interpreter(args.input, abstract, base, outfile, args.debug)
diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py
index 286f47d0cfb11b..ff1a3702f62710 100644
--- a/Tools/cases_generator/stack.py
+++ b/Tools/cases_generator/stack.py
@@ -230,7 +230,7 @@ def __init__(self) -> None:
self.variables: list[Local] = []
self.defined: set[str] = set()
- def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
+ def pop(self, var: StackItem, extract_bits: bool = False, assign_unused: bool = False) -> tuple[str, Local]:
self.top_offset.pop(var)
indirect = "&" if var.is_array() else ""
if self.variables:
@@ -245,7 +245,7 @@ def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
f"Size mismatch when popping '{popped.name}' from stack to assign to '{var.name}'. "
f"Expected {var_size(var)} got {var_size(popped.item)}"
)
- if var.name in UNUSED:
+ if var.name in UNUSED and not assign_unused:
if popped.name not in UNUSED and popped.name in self.defined:
raise StackError(
f"Value is declared unused, but is already cached by prior operation as '{popped.name}'"
@@ -268,7 +268,7 @@ def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
return defn, Local.redefinition(var, popped)
self.base_offset.pop(var)
- if var.name in UNUSED or not var.used:
+ if not assign_unused and (var.name in UNUSED or not var.used):
return "", Local.unused(var)
self.defined.add(var.name)
cast = f"({var.type})" if (not indirect and var.type) else ""
@@ -300,7 +300,7 @@ def _do_emit(
cast_type: str = "uintptr_t",
extract_bits: bool = True,
) -> None:
- cast = f"({cast_type})" if var.type else ""
+ cast = f"({cast_type})" if (var.type and cast_type) else ""
bits = ".bits" if cast and extract_bits else ""
if var.condition == "0":
return