// Macros and other things needed by ceval.c, and bytecodes.c

/* Computed GOTOs, or
       the-optimization-commonly-but-improperly-known-as-"threaded code"
   using gcc's labels-as-values extension
   (http://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html).

   The traditional bytecode evaluation loop uses a "switch" statement, which
   decent compilers will optimize as a single indirect branch instruction
   combined with a lookup table of jump addresses. However, since the
   indirect jump instruction is shared by all opcodes, the CPU will have a
   hard time making the right prediction for where to jump next (actually,
   it will always be wrong except in the uncommon case of a sequence of
   several identical opcodes).

   "Threaded code", in contrast, uses an explicit jump table and an explicit
   indirect jump instruction at the end of each opcode. Since the jump
   instruction is at a different address for each opcode, the CPU will make a
   separate prediction for each of these instructions, which is equivalent to
   predicting the second opcode of each opcode pair. These predictions have
   a much better chance of turning out valid, especially in small bytecode loops.

   A mispredicted branch on a modern CPU flushes the whole pipeline and
   can cost several CPU cycles (depending on the pipeline depth),
   and potentially many more instructions (depending on the pipeline width).
   A correctly predicted branch, however, is nearly free.

   At the time of this writing, the "threaded code" version is up to 15-20%
   faster than the normal "switch" version, depending on the compiler and the
   CPU architecture.

   NOTE: care must be taken that the compiler doesn't try to "optimize" the
   indirect jumps by sharing them between all opcodes. Such optimizations
   can be disabled on gcc by using the -fno-gcse flag (or possibly
   -fno-crossjumping).
*/
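
/* For illustration only (not part of this header): a minimal, self-contained
   sketch of the two dispatch styles described above, for a hypothetical
   two-opcode machine. The threaded version relies on the GCC/Clang
   labels-as-values extension.

       enum { OP_INC, OP_HALT };

       // Switch dispatch: every opcode shares one indirect jump.
       static int run_switch(const unsigned char *code) {
           int acc = 0;
           for (;;) {
               switch (*code++) {
                   case OP_INC:  acc++; break;
                   case OP_HALT: return acc;
               }
           }
       }

       // Threaded dispatch: each handler ends with its own computed goto.
       static int run_threaded(const unsigned char *code) {
           static void *targets[] = { &&do_inc, &&do_halt };
           int acc = 0;
           goto *targets[*code++];
       do_inc:
           acc++;
           goto *targets[*code++];
       do_halt:
           return acc;
       }
*/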

/* Use macros rather than inline functions, to make it as clear as possible
 * to the C compiler that the tracing check is a simple test then branch.
 * We want to be sure that the compiler knows this before it generates
 * the CFG.
 */

#ifdef WITH_DTRACE
#define OR_DTRACE_LINE | (PyDTrace_LINE_ENABLED() ? 255 : 0)
#else
#define OR_DTRACE_LINE
#endif

#ifdef HAVE_COMPUTED_GOTOS
    #ifndef USE_COMPUTED_GOTOS
    #define USE_COMPUTED_GOTOS 1
    #endif
#else
    #if defined(USE_COMPUTED_GOTOS) && USE_COMPUTED_GOTOS
    #error "Computed gotos are not supported on this compiler."
    #endif
    #undef USE_COMPUTED_GOTOS
    #define USE_COMPUTED_GOTOS 0
#endif

#ifdef Py_STATS
#define INSTRUCTION_STATS(op) \
    do { \
        OPCODE_EXE_INC(op); \
        if (_Py_stats) _Py_stats->opcode_stats[lastopcode].pair_count[op]++; \
        lastopcode = op; \
    } while (0)
#else
#define INSTRUCTION_STATS(op) ((void)0)
#endif

#define TAIL_CALL_PARAMS _PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState *tstate, _Py_CODEUNIT *next_instr, int oparg
#define TAIL_CALL_ARGS frame, stack_pointer, tstate, next_instr, oparg

#if Py_TAIL_CALL_INTERP
    // Note: [[clang::musttail]] works for GCC 15, but not __attribute__((musttail)) at the moment.
#   define Py_MUSTTAIL [[clang::musttail]]
#   define Py_PRESERVE_NONE_CC __attribute__((preserve_none))
    Py_PRESERVE_NONE_CC typedef PyObject* (*py_tail_call_funcptr)(TAIL_CALL_PARAMS);

#   define TARGET(op) Py_PRESERVE_NONE_CC PyObject *_TAIL_CALL_##op(TAIL_CALL_PARAMS)
#   define DISPATCH_GOTO() \
        do { \
            Py_MUSTTAIL return (INSTRUCTION_TABLE[opcode])(TAIL_CALL_ARGS); \
        } while (0)
#   define JUMP_TO_LABEL(name) \
        do { \
            Py_MUSTTAIL return (_TAIL_CALL_##name)(TAIL_CALL_ARGS); \
        } while (0)
#   define JUMP_TO_PREDICTED(name) \
        do { \
            Py_MUSTTAIL return (_TAIL_CALL_##name)(frame, stack_pointer, tstate, this_instr, oparg); \
        } while (0)
#   define LABEL(name) TARGET(name)
#elif USE_COMPUTED_GOTOS
#  define TARGET(op) TARGET_##op:
#  define DISPATCH_GOTO() goto *opcode_targets[opcode]
#  define JUMP_TO_LABEL(name) goto name;
#  define JUMP_TO_PREDICTED(name) goto PREDICTED_##name;
#  define LABEL(name) name:
#else
#  define TARGET(op) case op: TARGET_##op:
#  define DISPATCH_GOTO() goto dispatch_opcode
#  define JUMP_TO_LABEL(name) goto name;
#  define JUMP_TO_PREDICTED(name) goto PREDICTED_##name;
#  define LABEL(name) name:
#endif
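
/* For illustration (the handler body is hypothetical): a handler that the
   code generator emits as

       TARGET(NOP) { ...; DISPATCH_GOTO(); }

   expands under the computed-goto build to roughly

       TARGET_NOP: { ...; goto *opcode_targets[opcode]; }

   under the plain switch build to

       case NOP: TARGET_NOP: { ...; goto dispatch_opcode; }

   and under the tail-calling build each handler becomes its own function
   _TAIL_CALL_NOP(...) whose dispatch is a must-tail call through
   INSTRUCTION_TABLE[opcode]. */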

/* PRE_DISPATCH_GOTO() does lltrace if enabled. Normally a no-op */
#ifdef Py_DEBUG
#define PRE_DISPATCH_GOTO() if (frame->lltrace >= 5) { \
    lltrace_instruction(frame, stack_pointer, next_instr, opcode, oparg); }
#else
#define PRE_DISPATCH_GOTO() ((void)0)
#endif

#ifdef Py_DEBUG
#define LLTRACE_RESUME_FRAME() \
do { \
    _PyFrame_SetStackPointer(frame, stack_pointer); \
    int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); \
    stack_pointer = _PyFrame_GetStackPointer(frame); \
    if (lltrace < 0) { \
        JUMP_TO_LABEL(exit_unwind); \
    } \
    frame->lltrace = lltrace; \
} while (0)
#else
#define LLTRACE_RESUME_FRAME() ((void)0)
#endif

#ifdef Py_GIL_DISABLED
#define QSBR_QUIESCENT_STATE(tstate) _Py_qsbr_quiescent_state(((_PyThreadStateImpl *)tstate)->qsbr)
#else
#define QSBR_QUIESCENT_STATE(tstate)
#endif


/* Do interpreter dispatch accounting for tracing and instrumentation */
#define DISPATCH() \
    { \
        assert(frame->stackpointer == NULL); \
        NEXTOPARG(); \
        PRE_DISPATCH_GOTO(); \
        DISPATCH_GOTO(); \
    }

#define DISPATCH_SAME_OPARG() \
    { \
        opcode = next_instr->op.code; \
        PRE_DISPATCH_GOTO(); \
        DISPATCH_GOTO(); \
    }

#define DISPATCH_INLINED(NEW_FRAME)                     \
    do {                                                \
        assert(tstate->interp->eval_frame == NULL);     \
        _PyFrame_SetStackPointer(frame, stack_pointer); \
        assert((NEW_FRAME)->previous == frame);         \
        frame = tstate->current_frame = (NEW_FRAME);    \
        CALL_STAT_INC(inlined_py_calls);                \
        JUMP_TO_LABEL(start_frame);                     \
    } while (0)
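
/* A hedged sketch of how a generated Tier 1 handler strings these macros
   together (the NOP body below is illustrative of the generated code, not a
   verbatim copy):

       TARGET(NOP) {
           frame->instr_ptr = next_instr;
           next_instr += 1;
           INSTRUCTION_STATS(NOP);
           DISPATCH();
       }
*/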

/* Tuple access macros */

#ifndef Py_DEBUG
#define GETITEM(v, i) PyTuple_GET_ITEM((v), (i))
#else
static inline PyObject *
GETITEM(PyObject *v, Py_ssize_t i) {
    assert(PyTuple_Check(v));
    assert(i >= 0);
    assert(i < PyTuple_GET_SIZE(v));
    return PyTuple_GET_ITEM(v, i);
}
#endif

/* Code access macros */

/* The integer overflow is checked by an assertion below. */
#define INSTR_OFFSET() ((int)(next_instr - _PyFrame_GetBytecode(frame)))
#define NEXTOPARG()  do { \
        _Py_CODEUNIT word = {.cache = FT_ATOMIC_LOAD_UINT16_RELAXED(*(uint16_t*)next_instr)}; \
        opcode = word.op.code; \
        oparg = word.op.arg; \
    } while (0)

/* JUMPBY makes the generator identify the instruction as a jump. SKIP_OVER is
 * for advancing to the next instruction, taking into account cache entries
 * and skipped instructions.
 */
#define JUMPBY(x)       (next_instr += (x))
#define SKIP_OVER(x)    (next_instr += (x))


/* Stack manipulation macros */

/* The stack can grow at most MAXINT deep, as co_nlocals and
   co_stacksize are ints. */
#define STACK_LEVEL()     ((int)(stack_pointer - _PyFrame_Stackbase(frame)))
#define STACK_SIZE()      (_PyFrame_GetCode(frame)->co_stacksize)
#define EMPTY()           (STACK_LEVEL() == 0)
#define TOP()             (stack_pointer[-1])
#define SECOND()          (stack_pointer[-2])
#define THIRD()           (stack_pointer[-3])
#define FOURTH()          (stack_pointer[-4])
#define PEEK(n)           (stack_pointer[-(n)])
#define POKE(n, v)        (stack_pointer[-(n)] = (v))
#define SET_TOP(v)        (stack_pointer[-1] = (v))
#define SET_SECOND(v)     (stack_pointer[-2] = (v))
#define BASIC_STACKADJ(n) (stack_pointer += n)
#define BASIC_PUSH(v)     (*stack_pointer++ = (v))
#define BASIC_POP()       (*--stack_pointer)

#ifdef Py_DEBUG
#define PUSH(v)         do { \
                            BASIC_PUSH(v); \
                            assert(STACK_LEVEL() <= STACK_SIZE()); \
                        } while (0)
#define POP()           (assert(STACK_LEVEL() > 0), BASIC_POP())
#define STACK_GROW(n)   do { \
                            assert(n >= 0); \
                            BASIC_STACKADJ(n); \
                            assert(STACK_LEVEL() <= STACK_SIZE()); \
                        } while (0)
#define STACK_SHRINK(n) do { \
                            assert(n >= 0); \
                            assert(STACK_LEVEL() >= n); \
                            BASIC_STACKADJ(-(n)); \
                        } while (0)
#else
#define PUSH(v)                BASIC_PUSH(v)
#define POP()                  BASIC_POP()
#define STACK_GROW(n)          BASIC_STACKADJ(n)
#define STACK_SHRINK(n)        BASIC_STACKADJ(-(n))
#endif

#define WITHIN_STACK_BOUNDS() \
   (frame->owner == FRAME_OWNED_BY_INTERPRETER || (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE()))
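
/* A hypothetical handler that replaces the top two stack items with one
   result could use these helpers as follows (illustrative only; make_result()
   is a made-up helper, and real generated handlers manipulate stack_pointer
   directly):

       _PyStackRef right = PEEK(1);
       _PyStackRef left  = PEEK(2);
       _PyStackRef res   = make_result(left, right);
       POKE(2, res);
       STACK_SHRINK(1);
       assert(WITHIN_STACK_BOUNDS());
*/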

/* Data access macros */
#define FRAME_CO_CONSTS (_PyFrame_GetCode(frame)->co_consts)
#define FRAME_CO_NAMES  (_PyFrame_GetCode(frame)->co_names)

/* Local variable macros */

#define LOCALS_ARRAY    (frame->localsplus)
#define GETLOCAL(i)     (frame->localsplus[i])


#ifdef Py_STATS
#define UPDATE_MISS_STATS(INSTNAME)                              \
    do {                                                         \
        STAT_INC(opcode, miss);                                  \
        STAT_INC((INSTNAME), miss);                              \
        /* The counter is always the first cache entry: */       \
        if (ADAPTIVE_COUNTER_TRIGGERS(next_instr->cache)) {      \
            STAT_INC((INSTNAME), deopt);                         \
        }                                                        \
    } while (0)
#else
#define UPDATE_MISS_STATS(INSTNAME) ((void)0)
#endif


// Try to lock an object in the free threading build, if it's not already
// locked. Use with a DEOPT_IF() to deopt if the object is already locked.
// These are no-ops in the default GIL build. The general pattern is:
//
// DEOPT_IF(!LOCK_OBJECT(op));
// if (/* condition fails */) {
//     UNLOCK_OBJECT(op);
//     DEOPT_IF(true);
// }
// ...
// UNLOCK_OBJECT(op);
//
// NOTE: The object must be unlocked on every exit code path and you should
// avoid any potentially escaping calls (like PyStackRef_CLOSE) while the
// object is locked.
#ifdef Py_GIL_DISABLED
#  define LOCK_OBJECT(op) PyMutex_LockFast(&(_PyObject_CAST(op))->ob_mutex)
#  define UNLOCK_OBJECT(op) PyMutex_Unlock(&(_PyObject_CAST(op))->ob_mutex)
#else
#  define LOCK_OBJECT(op) (1)
#  define UNLOCK_OBJECT(op) ((void)0)
#endif
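
/* A concrete (hypothetical) instance of the pattern above, for a specialized
   instruction that mutates a list in the free-threading build:

       DEOPT_IF(!LOCK_OBJECT(list));
       if (!PyList_CheckExact(list)) {
           UNLOCK_OBJECT(list);
           DEOPT_IF(true);
       }
       // ... mutate the list; no escaping calls while it is locked ...
       UNLOCK_OBJECT(list);
*/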

#define GLOBALS() frame->f_globals
#define BUILTINS() frame->f_builtins
#define LOCALS() frame->f_locals
#define CONSTS() _PyFrame_GetCode(frame)->co_consts
#define NAMES() _PyFrame_GetCode(frame)->co_names

#define DTRACE_FUNCTION_ENTRY()  \
    if (PyDTrace_FUNCTION_ENTRY_ENABLED()) { \
        dtrace_function_entry(frame); \
    }

/* This takes a uint16_t instead of a _Py_BackoffCounter,
 * because it is used directly on the cache entry in generated code,
 * which is always an integral type. */
#define ADAPTIVE_COUNTER_TRIGGERS(COUNTER) \
    backoff_counter_triggers(forge_backoff_counter((COUNTER)))

#define ADVANCE_ADAPTIVE_COUNTER(COUNTER) \
    do { \
        (COUNTER) = advance_backoff_counter((COUNTER)); \
    } while (0);

#define PAUSE_ADAPTIVE_COUNTER(COUNTER) \
    do { \
        (COUNTER) = pause_backoff_counter((COUNTER)); \
    } while (0);
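
/* A hedged sketch of the specialize-or-back-off pattern these counters
   support. "this_instr[1]" stands for the instruction's first cache entry
   (read as a uint16_t via .cache, adjusted as a backoff counter via
   .counter), and specialize_somehow() is a placeholder for the real
   _Py_Specialize_* helpers:

       if (ADAPTIVE_COUNTER_TRIGGERS(this_instr[1].cache)) {
           specialize_somehow(this_instr);   // rewrite to a specialized opcode
           DISPATCH_SAME_OPARG();            // re-dispatch the specialized form
       }
       ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);  // back off before retrying
*/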

#ifdef ENABLE_SPECIALIZATION_FT
/* Multiple threads may execute these concurrently if thread-local bytecode is
 * disabled and they all execute the main copy of the bytecode. Specialization
 * is disabled in that case so the value is unused, but the RMW cycle should be
 * free of data races.
 */
#define RECORD_BRANCH_TAKEN(bitset, flag) \
    FT_ATOMIC_STORE_UINT16_RELAXED(       \
        bitset, (FT_ATOMIC_LOAD_UINT16_RELAXED(bitset) << 1) | (flag))
#else
#define RECORD_BRANCH_TAKEN(bitset, flag)
#endif

#define UNBOUNDLOCAL_ERROR_MSG \
    "cannot access local variable '%s' where it is not associated with a value"
#define UNBOUNDFREE_ERROR_MSG \
    "cannot access free variable '%s' where it is not associated with a value" \
    " in enclosing scope"
#define NAME_ERROR_MSG "name '%.200s' is not defined"

// If a trace function sets a new f_lineno and *then* raises, we use the
// destination when searching for an exception handler, displaying the
// traceback, and so on.
#define INSTRUMENTED_JUMP(src, dest, event) \
do { \
    if (tstate->tracing) { \
        next_instr = dest; \
    } else { \
        _PyFrame_SetStackPointer(frame, stack_pointer); \
        next_instr = _Py_call_instrumentation_jump(this_instr, tstate, event, frame, src, dest); \
        stack_pointer = _PyFrame_GetStackPointer(frame); \
        if (next_instr == NULL) { \
            next_instr = (dest)+1; \
            JUMP_TO_LABEL(error); \
        } \
    } \
} while (0);


static inline int _Py_EnterRecursivePy(PyThreadState *tstate) {
    return (tstate->py_recursion_remaining-- <= 0) &&
        _Py_CheckRecursiveCallPy(tstate);
}

static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate)  {
    tstate->py_recursion_remaining++;
}

/* Implementation of "macros" that modify the instruction pointer,
 * stack pointer, or frame pointer.
 * These need to be treated differently by tier 1 and tier 2.
 * The Tier 1 version is here; Tier 2 is inlined in ceval.c. */

#define LOAD_IP(OFFSET) do { \
        next_instr = frame->instr_ptr + (OFFSET); \
    } while (0)

/* There's no STORE_IP(), it's inlined by the code generator. */

#define LOAD_SP() \
stack_pointer = _PyFrame_GetStackPointer(frame)

#define SAVE_SP() \
_PyFrame_SetStackPointer(frame, stack_pointer)

/* Tier-switching macros. */

#ifdef _Py_JIT
#define GOTO_TIER_TWO(EXECUTOR)                        \
do {                                                   \
    OPT_STAT_INC(traces_executed);                     \
    _PyExecutorObject *_executor = (EXECUTOR);         \
    jit_func jitted = _executor->jit_code;             \
    /* Keep the shim frame alive via the executor: */  \
    Py_INCREF(_executor);                              \
    next_instr = jitted(frame, stack_pointer, tstate); \
    Py_DECREF(_executor);                              \
    Py_CLEAR(tstate->previous_executor);               \
    frame = tstate->current_frame;                     \
    stack_pointer = _PyFrame_GetStackPointer(frame);   \
    if (next_instr == NULL) {                          \
        next_instr = frame->instr_ptr;                 \
        JUMP_TO_LABEL(error);                          \
    }                                                  \
    DISPATCH();                                        \
} while (0)
#else
#define GOTO_TIER_TWO(EXECUTOR) \
do { \
    OPT_STAT_INC(traces_executed); \
    next_uop = (EXECUTOR)->trace; \
    assert(next_uop->opcode == _START_EXECUTOR); \
    goto enter_tier_two; \
} while (0)
#endif

#define GOTO_TIER_ONE(TARGET)                                         \
    do                                                                \
    {                                                                 \
        next_instr = (TARGET);                                        \
        OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); \
        _PyFrame_SetStackPointer(frame, stack_pointer);               \
        Py_CLEAR(tstate->previous_executor);                          \
        stack_pointer = _PyFrame_GetStackPointer(frame);              \
        if (next_instr == NULL)                                       \
        {                                                             \
            next_instr = frame->instr_ptr;                            \
            goto error;                                               \
        }                                                             \
        DISPATCH();                                                   \
    } while (0)

#define CURRENT_OPARG()    (next_uop[-1].oparg)
#define CURRENT_OPERAND0() (next_uop[-1].operand0)
#define CURRENT_OPERAND1() (next_uop[-1].operand1)
#define CURRENT_TARGET()   (next_uop[-1].target)

#define JUMP_TO_JUMP_TARGET() goto jump_to_jump_target
#define JUMP_TO_ERROR() goto jump_to_error_target

/* Stackref macros */

/* How much scratch space to give the stackref-to-PyObject* conversion. */
#define MAX_STACKREF_SCRATCH 10

#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
    /* +1 because vectorcall might use -1 to write self */ \
    PyObject *NAME##_temp[MAX_STACKREF_SCRATCH+1]; \
    PyObject **NAME = _PyObjectArray_FromStackRefArray(ARGS, ARG_COUNT, NAME##_temp + 1);

#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
    /* +1 because we +1 previously */ \
    _PyObjectArray_Free(NAME - 1, NAME##_temp);

#define CONVERSION_FAILED(NAME) ((NAME) == NULL)
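
/* A hedged usage sketch of the conversion pair above ("args", "total_args",
   "callable_o" and the error label are illustrative names):

       STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
       if (CONVERSION_FAILED(args_o)) {
           goto error;
       }
       PyObject *res_o = PyObject_Vectorcall(callable_o, args_o, total_args, NULL);
       STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
*/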