Mirror of https://github.com/python/cpython.git, synced 2026-02-06 09:50:43 +00:00
gh-144289: Remove ENABLE_SPECIALIZATION_FT (gh-144290)
Now that the specializing interpreter works with free threading, replace ENABLE_SPECIALIZATION_FT with ENABLE_SPECIALIZATION and replace requires_specialization_ft with requires_specialization. Also limit the uniquely referenced check to FOR_ITER_RANGE. It's not necessary for FOR_ITER_GEN and would cause test_for_iter_gen to fail.
This commit is contained in:
parent 6b4538192d
commit 6ea3f8cd7f

13 changed files with 167 additions and 189 deletions
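The change is mechanical throughout: code that used to be guarded by the free-threading-specific macro is now guarded by plain ENABLE_SPECIALIZATION, and the _Py_Specialize_* helpers assert that macro instead of the _FT variant. As a rough, self-contained sketch of the compile-time-switch idea only (the names spec_counter and try_specialize below are invented for this illustration and are not CPython APIs):

/* Hypothetical illustration of consolidating the specialization switch.
 * Before gh-144290, free-threaded builds keyed the adaptive specializer on
 * ENABLE_SPECIALIZATION_FT; afterwards both build flavours share
 * ENABLE_SPECIALIZATION. */
#include <stdio.h>

#define ENABLE_SPECIALIZATION 1   /* single switch for default and free-threaded builds */

static int spec_counter = 0;      /* stands in for the adaptive backoff counter */

static void try_specialize(const char *op)
{
#if ENABLE_SPECIALIZATION         /* previously spelled: #if ENABLE_SPECIALIZATION_FT */
    if (++spec_counter > 2) {     /* counter "triggers": rewrite to a specialized form */
        printf("specializing %s\n", op);
        spec_counter = 0;
    }
#endif /* ENABLE_SPECIALIZATION */
}

int main(void)
{
    for (int i = 0; i < 6; i++) {
        try_specialize("TO_BOOL");
    }
    return 0;
}

In the real tree the same #if/#endif guard simply changes its spelling, as the hunks below show.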
@@ -169,11 +169,11 @@ dummy_func(
        }

        op(_QUICKEN_RESUME, (--)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (tstate->tracing == 0 && this_instr->op.code == RESUME) {
                FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK);
            }
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        tier1 op(_MAYBE_INSTRUMENT, (--)) {

@@ -463,7 +463,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_ToBool(value, next_instr);

@@ -471,7 +471,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(TO_BOOL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_TO_BOOL, (value -- res)) {

@@ -1131,7 +1131,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_STORE_SUBSCR, (counter/1, container, sub -- container, sub)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_StoreSubscr(container, sub, next_instr);

@@ -1139,7 +1139,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(STORE_SUBSCR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_STORE_SUBSCR, (v, container, sub -- )) {

@@ -1351,7 +1351,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_SEND, (counter/1, receiver, unused -- receiver, unused)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_Send(receiver, next_instr);

@@ -1359,7 +1359,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(SEND);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_SEND, (receiver, v -- receiver, retval)) {

@@ -1620,7 +1620,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_UNPACK_SEQUENCE, (counter/1, seq -- seq)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_UnpackSequence(seq, next_instr, oparg);

@@ -1628,7 +1628,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(UNPACK_SEQUENCE);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
            (void)seq;
            (void)counter;
        }

@@ -1705,7 +1705,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_STORE_ATTR, (counter/1, owner -- owner)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
                next_instr = this_instr;

@@ -1714,7 +1714,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(STORE_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_STORE_ATTR, (v, owner --)) {

@@ -1823,7 +1823,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_LOAD_GLOBAL, (counter/1 -- )) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                next_instr = this_instr;

@@ -1832,7 +1832,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(LOAD_GLOBAL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        // res[1] because we need a pointer to res to pass it to _PyEval_LoadGlobalStackRef

@@ -2213,7 +2213,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_LOAD_SUPER_ATTR, (counter/1, global_super_st, class_st, unused -- global_super_st, class_st, unused)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            int load_method = oparg & 1;
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;

@@ -2222,7 +2222,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(LOAD_SUPER_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        tier1 op(_LOAD_SUPER_ATTR, (global_super_st, class_st, self_st -- attr)) {

@@ -2345,7 +2345,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_LOAD_ATTR, (counter/1, owner -- owner)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                next_instr = this_instr;

@@ -2354,7 +2354,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(LOAD_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_LOAD_ATTR, (owner -- attr, self_or_null[oparg&1])) {

@@ -2746,7 +2746,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_COMPARE_OP, (counter/1, left, right -- left, right)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_CompareOp(left, right, next_instr, oparg);

@@ -2754,7 +2754,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(COMPARE_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_COMPARE_OP, (left, right -- res)) {

@@ -2875,7 +2875,7 @@ dummy_func(
        }

        specializing op(_SPECIALIZE_CONTAINS_OP, (counter/1, left, right -- left, right)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_ContainsOp(right, next_instr);

@@ -2883,7 +2883,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(CONTAINS_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        macro(CONTAINS_OP) = _SPECIALIZE_CONTAINS_OP + _CONTAINS_OP + POP_TOP + POP_TOP;

@@ -3259,7 +3259,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter, null_or_index -- iter, null_or_index)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg);

@@ -3267,7 +3267,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(FOR_ITER);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) {

@@ -3742,7 +3742,7 @@ dummy_func(
        };

        specializing op(_SPECIALIZE_CALL, (counter/1, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_Call(callable, self_or_null, next_instr, oparg + !PyStackRef_IsNull(self_or_null));

@@ -3750,7 +3750,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(CALL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        op(_MAYBE_EXPAND_METHOD, (callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) {

@@ -4758,7 +4758,7 @@ dummy_func(
            _PUSH_FRAME;

        specializing op(_SPECIALIZE_CALL_KW, (counter/1, callable, self_or_null, unused[oparg], unused -- callable, self_or_null, unused[oparg], unused)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_CallKw(callable, next_instr, oparg + !PyStackRef_IsNull(self_or_null));

@@ -4766,7 +4766,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(CALL_KW);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        macro(CALL_KW) =

@@ -4921,7 +4921,7 @@ dummy_func(
        }

        specializing op(_SPECIALIZE_CALL_FUNCTION_EX, (counter/1, func, unused, unused, unused -- func, unused, unused, unused)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_CallFunctionEx(func, next_instr);

@@ -4929,7 +4929,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(CALL_FUNCTION_EX);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }

        macro(CALL_FUNCTION_EX) =

@@ -5108,7 +5108,7 @@ dummy_func(
        }

        specializing op(_SPECIALIZE_BINARY_OP, (counter/1, lhs, rhs -- lhs, rhs)) {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);

@@ -5116,7 +5116,7 @@ dummy_func(
            }
            OPCODE_DEFERRED_INC(BINARY_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
            assert(NB_ADD <= oparg);
            assert(oparg <= NB_OPARG_LAST);
        }
@@ -1321,8 +1321,6 @@ _PyTier2Interpreter(

#undef ENABLE_SPECIALIZATION
#define ENABLE_SPECIALIZATION 0
-#undef ENABLE_SPECIALIZATION_FT
-#define ENABLE_SPECIALIZATION_FT 0

    uint16_t uopcode;
#ifdef Py_STATS
@@ -351,7 +351,7 @@ GETITEM(PyObject *v, Py_ssize_t i) {
    (COUNTER) = pause_backoff_counter((COUNTER)); \
} while (0);

-#ifdef ENABLE_SPECIALIZATION_FT
+#ifdef ENABLE_SPECIALIZATION
/* Multiple threads may execute these concurrently if thread-local bytecode is
 * disabled and they all execute the main copy of the bytecode. Specialization
 * is disabled in that case so the value is unused, but the RMW cycle should be
Python/generated_cases.c.h (generated, 64 changed lines)
@@ -41,7 +41,7 @@
            lhs = stack_pointer[-2];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -51,7 +51,7 @@
            }
            OPCODE_DEFERRED_INC(BINARY_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
            assert(NB_ADD <= oparg);
            assert(oparg <= NB_OPARG_LAST);
        }

@@ -1742,7 +1742,7 @@
            callable = stack_pointer[-2 - oparg];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -1752,7 +1752,7 @@
            }
            OPCODE_DEFERRED_INC(CALL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 2 cache entries */
        // _MAYBE_EXPAND_METHOD

@@ -2783,7 +2783,7 @@
            func = stack_pointer[-4];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -2793,7 +2793,7 @@
            }
            OPCODE_DEFERRED_INC(CALL_FUNCTION_EX);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _MAKE_CALLARGS_A_TUPLE
        {

@@ -3111,7 +3111,7 @@
            callable = stack_pointer[-3 - oparg];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -3121,7 +3121,7 @@
            }
            OPCODE_DEFERRED_INC(CALL_KW);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 2 cache entries */
        // _MAYBE_EXPAND_METHOD_KW

@@ -4771,7 +4771,7 @@
            left = stack_pointer[-2];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -4781,7 +4781,7 @@
            }
            OPCODE_DEFERRED_INC(COMPARE_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _COMPARE_OP
        {

@@ -5061,7 +5061,7 @@
            right = stack_pointer[-1];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -5071,7 +5071,7 @@
            }
            OPCODE_DEFERRED_INC(CONTAINS_OP);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _CONTAINS_OP
        {

@@ -5815,7 +5815,7 @@
            iter = stack_pointer[-2];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -5825,7 +5825,7 @@
            }
            OPCODE_DEFERRED_INC(FOR_ITER);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _FOR_ITER
        {

@@ -7879,7 +7879,7 @@
            owner = stack_pointer[-1];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                next_instr = this_instr;

@@ -7890,7 +7890,7 @@
            }
            OPCODE_DEFERRED_INC(LOAD_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 8 cache entries */
        // _LOAD_ATTR

@@ -9213,7 +9213,7 @@
        {
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
                next_instr = this_instr;

@@ -9224,7 +9224,7 @@
            }
            OPCODE_DEFERRED_INC(LOAD_GLOBAL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 1 cache entry */
        /* Skip 1 cache entry */

@@ -9533,7 +9533,7 @@
            global_super_st = stack_pointer[-3];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            int load_method = oparg & 1;
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;

@@ -9544,7 +9544,7 @@
            }
            OPCODE_DEFERRED_INC(LOAD_SUPER_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _LOAD_SUPER_ATTR
        {

@@ -10379,11 +10379,11 @@
        }
        // _QUICKEN_RESUME
        {
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (tstate->tracing == 0 && this_instr->op.code == RESUME) {
                FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK);
            }
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _CHECK_PERIODIC_IF_NOT_YIELD_FROM
        {

@@ -10526,7 +10526,7 @@
            receiver = stack_pointer[-2];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -10536,7 +10536,7 @@
            }
            OPCODE_DEFERRED_INC(SEND);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _SEND
        {

@@ -10822,7 +10822,7 @@
            owner = stack_pointer[-1];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
                next_instr = this_instr;

@@ -10833,7 +10833,7 @@
            }
            OPCODE_DEFERRED_INC(STORE_ATTR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 3 cache entries */
        // _STORE_ATTR

@@ -11362,7 +11362,7 @@
            container = stack_pointer[-2];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -11372,7 +11372,7 @@
            }
            OPCODE_DEFERRED_INC(STORE_SUBSCR);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        // _STORE_SUBSCR
        {

@@ -11601,7 +11601,7 @@
            value = stack_pointer[-1];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -11611,7 +11611,7 @@
            }
            OPCODE_DEFERRED_INC(TO_BOOL);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
        }
        /* Skip 2 cache entries */
        // _TO_BOOL

@@ -12056,7 +12056,7 @@
            seq = stack_pointer[-1];
            uint16_t counter = read_u16(&this_instr[1].cache);
            (void)counter;
-            #if ENABLE_SPECIALIZATION_FT
+            #if ENABLE_SPECIALIZATION
            if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
                next_instr = this_instr;
                _PyFrame_SetStackPointer(frame, stack_pointer);

@@ -12066,7 +12066,7 @@
            }
            OPCODE_DEFERRED_INC(UNPACK_SEQUENCE);
            ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
-            #endif /* ENABLE_SPECIALIZATION_FT */
+            #endif /* ENABLE_SPECIALIZATION */
            (void)seq;
            (void)counter;
        }
@@ -44,7 +44,7 @@ do { \
void
_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters)
{
-    #if ENABLE_SPECIALIZATION_FT
+    #if ENABLE_SPECIALIZATION
    _Py_BackoffCounter jump_counter, adaptive_counter;
    if (enable_counters) {
        PyThreadState *tstate = _PyThreadState_GET();

@@ -85,7 +85,7 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters
                oparg = 0;
        }
    }
-    #endif /* ENABLE_SPECIALIZATION_FT */
+    #endif /* ENABLE_SPECIALIZATION */
}

#define SIMPLE_FUNCTION 0

@@ -431,7 +431,7 @@ _Py_Specialize_LoadSuperAttr(_PyStackRef global_super_st, _PyStackRef cls_st, _P
    PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
    PyObject *cls = PyStackRef_AsPyObjectBorrow(cls_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR);
    if (global_super != (PyObject *)&PySuper_Type) {
        SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_SHADOWED);

@@ -952,7 +952,7 @@ _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *nam
{
    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[LOAD_ATTR] == INLINE_CACHE_ENTRIES_LOAD_ATTR);
    PyTypeObject *type = Py_TYPE(owner);
    bool fail;

@@ -983,7 +983,7 @@ _Py_Specialize_StoreAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *na
{
    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[STORE_ATTR] == INLINE_CACHE_ENTRIES_STORE_ATTR);
    PyObject *descr = NULL;
    _PyAttrCache *cache = (_PyAttrCache *)(instr + 1);

@@ -1293,7 +1293,7 @@ specialize_load_global_lock_held(
    PyObject *globals, PyObject *builtins,
    _Py_CODEUNIT *instr, PyObject *name)
{
-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[LOAD_GLOBAL] == INLINE_CACHE_ENTRIES_LOAD_GLOBAL);
    /* Use inline cache */
    _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + 1);

@@ -1514,7 +1514,7 @@ _Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_COD
    PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    PyTypeObject *container_type = Py_TYPE(container);
    if (container_type == &PyList_Type) {
        if (PyLong_CheckExact(sub)) {

@@ -1802,7 +1802,7 @@ _Py_Specialize_Call(_PyStackRef callable_st, _PyStackRef self_or_null_st, _Py_CO
{
    PyObject *callable = PyStackRef_AsPyObjectBorrow(callable_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL);
    assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL);
    int fail;

@@ -1844,7 +1844,7 @@ _Py_Specialize_CallKw(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs)
{
    PyObject *callable = PyStackRef_AsPyObjectBorrow(callable_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[CALL_KW] == INLINE_CACHE_ENTRIES_CALL_KW);
    assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL_KW);
    int fail;

@@ -2200,7 +2200,7 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
{
    PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP);

    _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);

@@ -2369,7 +2369,7 @@ _Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *i
    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
    uint8_t specialized_op;

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
    // All of these specializations compute boolean values, so they're all valid
    // regardless of the fifth-lowest oparg bit.

@@ -2429,7 +2429,7 @@ _Py_Specialize_UnpackSequence(_PyStackRef seq_st, _Py_CODEUNIT *instr, int oparg
{
    PyObject *seq = PyStackRef_AsPyObjectBorrow(seq_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[UNPACK_SEQUENCE] ==
           INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
    if (PyTuple_CheckExact(seq)) {

@@ -2534,29 +2534,27 @@ int
Py_NO_INLINE void
_Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg)
{
-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
    PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
    PyTypeObject *tp = Py_TYPE(iter_o);

    if (PyStackRef_IsNull(null_or_index)) {
-#ifdef Py_GIL_DISABLED
-        // Only specialize for uniquely referenced iterators, so that we know
-        // they're only referenced by this one thread. This is more limiting
-        // than we need (even `it = iter(mylist); for item in it:` won't get
-        // specialized) but we don't have a way to check whether we're the only
-        // _thread_ who has access to the object.
-        if (!_PyObject_IsUniquelyReferenced(iter_o)) {
-            goto failure;
-        }
-#endif
        if (tp == &PyRangeIter_Type) {
+#ifdef Py_GIL_DISABLED
+            // Only specialize for uniquely referenced iterators, so that we know
+            // they're only referenced by this one thread. This is more limiting
+            // than we need (even `it = iter(mylist); for item in it:` won't get
+            // specialized) but we don't have a way to check whether we're the only
+            // _thread_ who has access to the object.
+            if (!_PyObject_IsUniquelyReferenced(iter_o)) {
+                goto failure;
+            }
+#endif
            specialize(instr, FOR_ITER_RANGE);
            return;
        }
        else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
            // Generators are very much not thread-safe, so don't worry about
            // the specialization not being thread-safe.
            assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
                   instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
            );
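This hunk carries the behavioural part of the commit: on free-threaded builds the uniquely-referenced guard now applies only when specializing to FOR_ITER_RANGE, while the generator branch specializes without it (per the existing comment that generators are not thread-safe anyway). A rough, self-contained toy of the new control flow; the enum, function name, and return strings below are invented for this sketch and are not the real specializer:

/* Toy model of the restructured _Py_Specialize_ForIter decision (names
 * invented here): the uniqueness check guards only the range-iterator
 * branch, so generator iterators can still be specialized. */
#include <stdbool.h>
#include <stdio.h>

enum iter_kind { ITER_RANGE, ITER_GEN, ITER_OTHER };

static const char *specialize_for_iter(enum iter_kind kind, bool uniquely_referenced)
{
    (void)uniquely_referenced;        /* only consulted on free-threaded builds */
    if (kind == ITER_RANGE) {
#ifdef Py_GIL_DISABLED                /* defined by free-threaded CPython builds */
        if (!uniquely_referenced) {
            return "unspecialized";   /* corresponds to `goto failure;` above */
        }
#endif
        return "FOR_ITER_RANGE";
    }
    if (kind == ITER_GEN) {
        /* No uniqueness check here; see the generator comment in the hunk. */
        return "FOR_ITER_GEN";
    }
    return "unspecialized";
}

int main(void)
{
    /* unspecialized on free-threaded builds, FOR_ITER_RANGE otherwise */
    printf("%s\n", specialize_for_iter(ITER_RANGE, false));
    /* FOR_ITER_GEN in both build flavours */
    printf("%s\n", specialize_for_iter(ITER_GEN, false));
    return 0;
}

The toy compiles with any C compiler; in an actual free-threaded CPython build the analogous guard is active because Py_GIL_DISABLED is defined.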
@@ -2595,7 +2593,7 @@ _Py_Specialize_Send(_PyStackRef receiver_st, _Py_CODEUNIT *instr)
{
    PyObject *receiver = PyStackRef_AsPyObjectBorrow(receiver_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[SEND] == INLINE_CACHE_ENTRIES_SEND);
    PyTypeObject *tp = Py_TYPE(receiver);
    if (tp == &PyGen_Type || tp == &PyCoro_Type) {

@@ -2618,7 +2616,7 @@ _Py_Specialize_CallFunctionEx(_PyStackRef func_st, _Py_CODEUNIT *instr)
{
    PyObject *func = PyStackRef_AsPyObjectBorrow(func_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[CALL_FUNCTION_EX] == INLINE_CACHE_ENTRIES_CALL_FUNCTION_EX);

    if (Py_TYPE(func) == &PyFunction_Type &&

@@ -2685,7 +2683,7 @@ check_type_always_true(PyTypeObject *ty)
Py_NO_INLINE void
_Py_Specialize_ToBool(_PyStackRef value_o, _Py_CODEUNIT *instr)
{
-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[TO_BOOL] == INLINE_CACHE_ENTRIES_TO_BOOL);
    _PyToBoolCache *cache = (_PyToBoolCache *)(instr + 1);
    PyObject *value = PyStackRef_AsPyObjectBorrow(value_o);

@@ -2761,7 +2759,7 @@ _Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr)
{
    PyObject *value = PyStackRef_AsPyObjectBorrow(value_st);

-    assert(ENABLE_SPECIALIZATION_FT);
+    assert(ENABLE_SPECIALIZATION);
    assert(_PyOpcode_Caches[CONTAINS_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
    if (PyDict_CheckExact(value)) {
        specialize(instr, CONTAINS_OP_DICT);