mirror of
https://github.com/python/cpython.git
synced 2025-10-25 18:54:53 +00:00
[3.13] gh-134163: Fix an infinite loop when the process runs out of memory in a try block (GH-138491)
Signed-off-by: yihong0618 <zouzou0208@gmail.com> Co-authored-by: Peter Bierma <zintensitydev@gmail.com>
This commit is contained in:
parent
443d4af2ee
commit
afec6a5460
3 changed files with 38 additions and 1 deletions
|
|
@ -1843,6 +1843,38 @@ def test_memory_error_in_subinterp(self):
|
|||
rc, _, err = script_helper.assert_python_ok("-c", code)
|
||||
self.assertIn(b'MemoryError', err)
|
||||
|
||||
@cpython_only
# Py_TRACE_REFS builds abort with a fatal error inside
# _PyRefchain_Trace() when an allocation fails, so this
# scenario cannot be exercised there.
@unittest.skipIf(support.Py_TRACE_REFS, 'cannot test Py_TRACE_REFS build')
def test_exec_set_nomemory_hang(self):
    """Regression test for gh-134163.

    A MemoryError raised inside a try/except block used to send the
    interpreter into an infinite loop.  Run the reproducer in a child
    process and check that it terminates and reports the MemoryError.
    """
    import_module("_testcapi")

    # frame_lasti must exceed 257 so that PyLong_FromLong() cannot
    # hand back a cached small integer (those need no allocation).
    # Padding with no-op statements pushes the instruction index up.
    prefix = "a = list(range(0, 1))\n" * 20
    script = prefix + dedent("""
        try:
            import _testcapi
            _testcapi.set_nomemory(0)
            b = list(range(1000, 2000))
        except Exception as e:
            import traceback
            traceback.print_exc()
        """)

    # The child deliberately exhausts memory; suppress any OS-level
    # crash-report dialogs while it runs.
    with SuppressCrashReport(), script_helper.spawn_python('-c', script) as proc:
        proc.wait()
        captured = proc.stdout.read()

    self.assertIn(proc.returncode, (0, 1))
    # Producing any output at all proves the child did not hang.
    self.assertGreater(len(captured), 0)
    self.assertIn(b"MemoryError", captured)
|
||||
|
||||
|
||||
class NameErrorTests(unittest.TestCase):
|
||||
def test_name_error_has_name(self):
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue