mirror of
https://github.com/python/cpython.git
synced 2026-01-30 19:22:20 +00:00
gh-144054: shutdown fix for deferred ref counting (GH-144055)
When shutting down, disable deferred refcounting for all GC objects. It is important to do this also for untracked objects, which before this change were getting missed. Small code cleanup: We can remove the shutdown case disable_deferred_refcounting() call inside scan_heap_visitor() if we are careful about it. The key is that frame_disable_deferred_refcounting() might fail if the object is untracked.
This commit is contained in:
parent
31c81ab0a2
commit
795d5c5b44
1 changed file with 34 additions and 18 deletions
|
|
@ -308,17 +308,18 @@ disable_deferred_refcounting(PyObject *op)
|
|||
// should also be disabled when we turn off deferred refcounting.
|
||||
_PyObject_DisablePerThreadRefcounting(op);
|
||||
}
|
||||
|
||||
// Generators and frame objects may contain deferred references to other
|
||||
// objects. If the pointed-to objects are part of cyclic trash, we may
|
||||
// have disabled deferred refcounting on them and need to ensure that we
|
||||
// use strong references, in case the generator or frame object is
|
||||
// resurrected by a finalizer.
|
||||
if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
|
||||
frame_disable_deferred_refcounting(&((PyGenObject *)op)->gi_iframe);
|
||||
}
|
||||
else if (PyFrame_Check(op)) {
|
||||
frame_disable_deferred_refcounting(((PyFrameObject *)op)->f_frame);
|
||||
if (_PyObject_GC_IS_TRACKED(op)) {
|
||||
// Generators and frame objects may contain deferred references to other
|
||||
// objects. If the pointed-to objects are part of cyclic trash, we may
|
||||
// have disabled deferred refcounting on them and need to ensure that we
|
||||
// use strong references, in case the generator or frame object is
|
||||
// resurrected by a finalizer.
|
||||
if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
|
||||
frame_disable_deferred_refcounting(&((PyGenObject *)op)->gi_iframe);
|
||||
}
|
||||
else if (PyFrame_Check(op)) {
|
||||
frame_disable_deferred_refcounting(((PyFrameObject *)op)->f_frame);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1240,19 +1241,30 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area,
|
|||
return true;
|
||||
}
|
||||
|
||||
if (state->reason == _Py_GC_REASON_SHUTDOWN) {
|
||||
// Disable deferred refcounting for reachable objects as well during
|
||||
// interpreter shutdown. This ensures that these objects are collected
|
||||
// immediately when their last reference is removed.
|
||||
disable_deferred_refcounting(op);
|
||||
}
|
||||
|
||||
// object is reachable, restore `ob_tid`; we're done with these objects
|
||||
gc_restore_tid(op);
|
||||
gc_clear_alive(op);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Disable deferred refcounting for reachable objects during interpreter
|
||||
// shutdown. This ensures that these objects are collected immediately when
|
||||
// their last reference is removed. This needs to consider both tracked and
|
||||
// untracked GC objects, since either might have deferred refcounts enabled.
|
||||
static bool
|
||||
scan_heap_disable_deferred(const mi_heap_t *heap, const mi_heap_area_t *area,
|
||||
void *block, size_t block_size, void *args)
|
||||
{
|
||||
PyObject *op = op_from_block_all_gc(block, args);
|
||||
if (op == NULL) {
|
||||
return true;
|
||||
}
|
||||
if (!_Py_IsImmortal(op)) {
|
||||
disable_deferred_refcounting(op);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static int
|
||||
move_legacy_finalizer_reachable(struct collection_state *state);
|
||||
|
||||
|
|
@ -1487,6 +1499,10 @@ deduce_unreachable_heap(PyInterpreterState *interp,
|
|||
// Restores ob_tid for reachable objects.
|
||||
gc_visit_heaps(interp, &scan_heap_visitor, &state->base);
|
||||
|
||||
if (state->reason == _Py_GC_REASON_SHUTDOWN) {
|
||||
gc_visit_heaps(interp, &scan_heap_disable_deferred, &state->base);
|
||||
}
|
||||
|
||||
if (state->legacy_finalizers.head) {
|
||||
// There may be objects reachable from legacy finalizers that are in
|
||||
// the unreachable set. We need to mark them as reachable.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue