Fix Unpacker max_buffer_length handling (#506)

Inada Naoki 2022-05-24 19:46:51 +09:00 committed by GitHub
parent b75e3412fb
commit 500a238028
3 changed files with 24 additions and 17 deletions


@@ -440,34 +440,30 @@ cdef class Unpacker(object):
         self.buf_size = buf_size
         self.buf_tail = tail + _buf_len
 
-    cdef read_from_file(self):
-        next_bytes = self.file_like_read(
-            min(self.read_size,
-                self.max_buffer_size - (self.buf_tail - self.buf_head)
-                ))
+    cdef int read_from_file(self) except -1:
+        cdef Py_ssize_t remains = self.max_buffer_size - (self.buf_tail - self.buf_head)
+        if remains <= 0:
+            raise BufferFull
+
+        next_bytes = self.file_like_read(min(self.read_size, remains))
         if next_bytes:
             self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes))
         else:
             self.file_like = None
+        return 0
 
     cdef object _unpack(self, execute_fn execute, bint iter=0):
         cdef int ret
         cdef object obj
         cdef Py_ssize_t prev_head
 
-        if self.buf_head >= self.buf_tail and self.file_like is not None:
-            self.read_from_file()
-
         while 1:
             prev_head = self.buf_head
-            if prev_head >= self.buf_tail:
-                if iter:
-                    raise StopIteration("No more data to unpack.")
-                else:
-                    raise OutOfData("No more data to unpack.")
-
-            ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
-            self.stream_offset += self.buf_head - prev_head
+            if prev_head < self.buf_tail:
+                ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
+                self.stream_offset += self.buf_head - prev_head
+            else:
+                ret = 0
 
             if ret == 1:
                 obj = unpack_data(&self.ctx)
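Note on the change above: read_from_file() used to compute its read size as max_buffer_size minus the bytes still buffered, so once the buffer was full the read returned no data and the unpacker treated the stream as exhausted (self.file_like = None) instead of reporting the overflow. It now raises BufferFull as soon as no room is left, and _unpack() no longer pre-reads from the file before entering the decode loop. A minimal sketch of the resulting behavior (payload sizes are illustrative, not taken from the commit):

import io
from msgpack import Unpacker, BufferFull, packb

# One object that fits in a 19-byte buffer, then one that cannot.
stream = io.BytesIO(packb(b"x" * 10) + packb(b"y" * 40))
unpacker = Unpacker(stream, read_size=1, max_buffer_size=19, max_bin_len=64)

assert unpacker.unpack() == b"x" * 10   # 12 bytes on the wire, fits
try:
    unpacker.unpack()                   # needs more than 19 buffered bytes
except BufferFull:
    print("BufferFull raised instead of treating the stream as exhausted")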


@@ -423,6 +423,8 @@ class Unpacker(object):
             # Read from file
             remain_bytes = -remain_bytes
+            if remain_bytes + len(self._buffer) > self._max_buffer_size:
+                raise BufferFull
             while remain_bytes > 0:
                 to_read_bytes = max(self._read_size, remain_bytes)
                 read_data = self.file_like.read(to_read_bytes)
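The pure-Python fallback gets the equivalent guard: before reading from the file it now checks whether the bytes still needed, plus what is already buffered, would exceed _max_buffer_size, and raises BufferFull instead of growing the buffer without bound. The same scenario as the new test, run against the fallback class directly; a small sketch assuming msgpack.fallback.Unpacker (the pure-Python implementation), not part of the commit:

import io
from msgpack import packb
from msgpack.exceptions import BufferFull
from msgpack.fallback import Unpacker  # pure-Python implementation

buff = io.BytesIO(packb(b"a" * 10) + packb([b"a" * 20] * 2))
unpacker = Unpacker(buff, read_size=1, max_buffer_size=19, max_bin_len=20)

assert unpacker.unpack() == b"a" * 10  # first object fits within 19 bytes
try:
    unpacker.unpack()                  # would need to buffer more than 19 bytes
except BufferFull:
    pass                               # raised by the new check above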


@@ -2,7 +2,7 @@
 # coding: utf-8
 import io
 from msgpack import Unpacker, BufferFull
-from msgpack import pack
+from msgpack import pack, packb
 from msgpack.exceptions import OutOfData
 from pytest import raises
@@ -78,6 +78,15 @@ def test_maxbuffersize():
     assert ord("b") == next(unpacker)
 
 
+def test_maxbuffersize_file():
+    buff = io.BytesIO(packb(b"a" * 10) + packb([b"a" * 20] * 2))
+    unpacker = Unpacker(buff, read_size=1, max_buffer_size=19, max_bin_len=20)
+    assert unpacker.unpack() == b"a" * 10
+    # assert unpacker.unpack() == [b"a" * 20]*2
+    with raises(BufferFull):
+        print(unpacker.unpack())
+
+
 def test_readbytes():
     unpacker = Unpacker(read_size=3)
     unpacker.feed(b"foobar")
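The new test_maxbuffersize_file exercises the file-backed path: packb(b"a" * 10) is 12 bytes on the wire and fits within max_buffer_size=19, while the following array of two 20-byte bins cannot be buffered within the limit, so the second unpack() must raise BufferFull. For comparison, the usual bounded-memory pattern for a stream of small messages is to iterate the Unpacker; a usage sketch with illustrative values, not part of the commit:

import io
from msgpack import Unpacker, packb

# Many small messages; iteration reads from the stream in read_size chunks
# and never buffers more than max_buffer_size bytes at a time.
stream = io.BytesIO(b"".join(packb({"seq": i}) for i in range(1000)))
unpacker = Unpacker(stream, read_size=1024, max_buffer_size=64 * 1024)

assert sum(1 for _ in unpacker) == 1000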