gh-129005: Remove copies from _pyio using take_bytes (#141539)

Memory usage now matches that of _io for large files.
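For context, the pattern this change relies on looks roughly like the sketch below. It is illustrative only, not the committed _pyio code, and it assumes an interpreter new enough to provide bytearray.resize() and bytearray.take_bytes(), the two methods used in the diffs that follow.

    import io

    def read_chunk_with_copy(raw: io.RawIOBase, n: int) -> bytes:
        # Old pattern: the slice and the bytes() call each duplicate the
        # data, so peak memory is roughly twice the amount actually read.
        buf = bytearray(n)
        got = raw.readinto(buf)
        return bytes(buf[:got])

    def read_chunk_with_take_bytes(raw: io.RawIOBase, n: int) -> bytes:
        # New pattern: shrink the buffer in place, then move its storage
        # into a bytes object without copying; buf is left empty afterwards.
        buf = bytearray(n)
        got = raw.readinto(buf)
        buf.resize(got)
        return buf.take_bytes()

For a multi-gigabyte read, the first version briefly holds both the bytearray and its bytes copy, while the second holds a single buffer; that difference is what the memuse changes in the large-file tests below measure.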
Cody Maloney 2025-11-18 01:10:32 -08:00 committed by GitHub
parent 4867f717e2
commit 58f3fe0d9b
3 changed files with 8 additions and 9 deletions


@@ -1277,7 +1277,8 @@ def test_flush_and_readinto(self):
         def _readinto(bufio, n=-1):
             b = bytearray(n if n >= 0 else 9999)
             n = bufio.readinto(b)
-            return bytes(b[:n])
+            b.resize(n)
+            return b.take_bytes()
         self.check_flush_and_read(_readinto)

     def test_flush_and_peek(self):
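Run by hand, the updated helper behaves as follows. This is a standalone illustration, not part of the test suite, and it again assumes bytearray.resize()/take_bytes() are available.

    import io

    def _readinto(bufio, n=-1):
        # Same shape as the helper in the hunk above.
        b = bytearray(n if n >= 0 else 9999)
        n = bufio.readinto(b)
        b.resize(n)
        return b.take_bytes()

    bufio = io.BufferedReader(io.BytesIO(b"hello world"))
    assert _readinto(bufio, 5) == b"hello"
    assert _readinto(bufio) == b" world"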


@@ -56,9 +56,7 @@ class TestFileMethods(LargeFileTest):
     (i.e. > 2 GiB) files.
     """

-    # _pyio.FileIO.readall() uses a temporary bytearray then casted to bytes,
-    # so memuse=2 is needed
-    @bigmemtest(size=size, memuse=2, dry_run=False)
+    @bigmemtest(size=size, memuse=1, dry_run=False)
     def test_large_read(self, _size):
         # bpo-24658: Test that a read greater than 2GB does not fail.
         with self.open(TESTFN, "rb") as f:
@@ -154,7 +152,7 @@ def test_seekable(self):
                 f.seek(pos)
                 self.assertTrue(f.seekable())

-    @bigmemtest(size=size, memuse=2, dry_run=False)
+    @bigmemtest(size=size, memuse=1, dry_run=False)
     def test_seek_readall(self, _size):
         # Seek which doesn't change position should readall successfully.
         with self.open(TESTFN, 'rb') as f:
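A note on the memuse change: bigmemtest's memuse argument is, roughly, the expected bytes of peak memory per byte of the requested size. Dropping it from 2 to 1 encodes the same point as the deleted comment: _pyio's readall() no longer holds both a working bytearray and a full bytes copy at peak. Illustrative arithmetic only, ignoring interpreter overhead and allocator slack:

    size = 2 * 1024**3            # a 2 GiB read
    old_peak = 2 * size           # bytearray + bytes copy         -> memuse=2
    new_peak = 1 * size           # single buffer via take_bytes() -> memuse=1
    print(old_peak / 1024**3, new_peak / 1024**3)   # 4.0 vs 2.0 GiB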