Mirror of https://github.com/msgpack/msgpack-python.git, synced 2025-10-20 12:23:16 +00:00
fallback: simplify write_bytes callback implementation
This commit is contained in:
parent 3322a76989
commit e9c42fa523

1 changed file with 64 additions and 60 deletions
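The commit removes the write_bytes parameter from the internal helpers (_read, _read_header, _unpack) and instead has the public entry points (skip, unpack, read_array_header, read_map_header) invoke the callback once, passing the slice of the internal buffer that the call consumed (self._buffer[self._buf_checkpoint:self._buff_i]). Below is a minimal usage sketch of the callback after this change; it is illustrative only and assumes the pure-Python Packer and Unpacker from msgpack.fallback together with the write_bytes keyword exactly as it appears in this diff.

    # Sketch only, not part of the commit: capture the raw bytes of an unpacked
    # object via the write_bytes callback on the public API.
    from io import BytesIO

    from msgpack.fallback import Packer, Unpacker

    raw_copy = BytesIO()                      # receives the consumed buffer slice
    packed = Packer().pack([1, [2, 3], 4])    # one complete msgpack object

    unpacker = Unpacker()
    unpacker.feed(packed)

    obj = unpacker.unpack(write_bytes=raw_copy.write)

    assert obj == [1, [2, 3], 4]
    assert raw_copy.getvalue() == packed      # a single callback call per unpack()

The net effect is that the callback fires once per top-level call with the whole consumed slice, rather than once for every internal _read(), which keeps the hot _read path free of the extra branch.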
@@ -264,7 +264,7 @@ class Unpacker(object):
     def read_bytes(self, n):
         return self._read(n)

-    def _read(self, n, write_bytes=None):
+    def _read(self, n):
         # (int, Optional[Callable]) -> bytearray
         remain_bytes = len(self._buffer) - self._buff_i - n

@@ -272,8 +272,6 @@ class Unpacker(object):
         if remain_bytes >= 0:
             ret = self._buffer[self._buff_i:self._buff_i+n]
             self._buff_i += n
-            if write_bytes is not None:
-                write_bytes(ret)
             return ret

         if self._feeding:
@@ -310,15 +308,13 @@ class Unpacker(object):
         ret = self._buffer[self._buff_i:self._buff_i+n]
         self._buff_i += n

-        if write_bytes is not None:
-            write_bytes(ret)
         return ret

-    def _read_header(self, execute=EX_CONSTRUCT, write_bytes=None):
+    def _read_header(self, execute=EX_CONSTRUCT):
         typ = TYPE_IMMEDIATE
         n = 0
         obj = None
-        c = self._read(1, write_bytes)
+        c = self._read(1)
         b = ord(c)
         if b & 0b10000000 == 0:
             obj = b
@@ -326,7 +322,7 @@ class Unpacker(object):
             obj = struct.unpack("b", c)[0]
         elif b & 0b11100000 == 0b10100000:
             n = b & 0b00011111
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
             typ = TYPE_RAW
             if n > self._max_str_len:
                 raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
@@ -348,120 +344,120 @@ class Unpacker(object):
             obj = True
         elif b == 0xc4:
             typ = TYPE_BIN
-            n = struct.unpack("B", self._read(1, write_bytes))[0]
+            n = struct.unpack("B", self._read(1))[0]
             if n > self._max_bin_len:
                 raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xc5:
             typ = TYPE_BIN
-            n = struct.unpack(">H", self._read(2, write_bytes))[0]
+            n = struct.unpack(">H", self._read(2))[0]
             if n > self._max_bin_len:
                 raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xc6:
             typ = TYPE_BIN
-            n = struct.unpack(">I", self._read(4, write_bytes))[0]
+            n = struct.unpack(">I", self._read(4))[0]
             if n > self._max_bin_len:
                 raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xc7: # ext 8
             typ = TYPE_EXT
-            L, n = struct.unpack('Bb', self._read(2, write_bytes))
+            L, n = struct.unpack('Bb', self._read(2))
             if L > self._max_ext_len:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
-            obj = self._read(L, write_bytes)
+            obj = self._read(L)
         elif b == 0xc8: # ext 16
             typ = TYPE_EXT
-            L, n = struct.unpack('>Hb', self._read(3, write_bytes))
+            L, n = struct.unpack('>Hb', self._read(3))
             if L > self._max_ext_len:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
-            obj = self._read(L, write_bytes)
+            obj = self._read(L)
         elif b == 0xc9: # ext 32
             typ = TYPE_EXT
-            L, n = struct.unpack('>Ib', self._read(5, write_bytes))
+            L, n = struct.unpack('>Ib', self._read(5))
             if L > self._max_ext_len:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
-            obj = self._read(L, write_bytes)
+            obj = self._read(L)
         elif b == 0xca:
-            obj = struct.unpack(">f", self._read(4, write_bytes))[0]
+            obj = struct.unpack(">f", self._read(4))[0]
         elif b == 0xcb:
-            obj = struct.unpack(">d", self._read(8, write_bytes))[0]
+            obj = struct.unpack(">d", self._read(8))[0]
         elif b == 0xcc:
-            obj = struct.unpack("B", self._read(1, write_bytes))[0]
+            obj = struct.unpack("B", self._read(1))[0]
         elif b == 0xcd:
-            obj = struct.unpack(">H", self._read(2, write_bytes))[0]
+            obj = struct.unpack(">H", self._read(2))[0]
         elif b == 0xce:
-            obj = struct.unpack(">I", self._read(4, write_bytes))[0]
+            obj = struct.unpack(">I", self._read(4))[0]
         elif b == 0xcf:
-            obj = struct.unpack(">Q", self._read(8, write_bytes))[0]
+            obj = struct.unpack(">Q", self._read(8))[0]
         elif b == 0xd0:
-            obj = struct.unpack("b", self._read(1, write_bytes))[0]
+            obj = struct.unpack("b", self._read(1))[0]
         elif b == 0xd1:
-            obj = struct.unpack(">h", self._read(2, write_bytes))[0]
+            obj = struct.unpack(">h", self._read(2))[0]
         elif b == 0xd2:
-            obj = struct.unpack(">i", self._read(4, write_bytes))[0]
+            obj = struct.unpack(">i", self._read(4))[0]
         elif b == 0xd3:
-            obj = struct.unpack(">q", self._read(8, write_bytes))[0]
+            obj = struct.unpack(">q", self._read(8))[0]
         elif b == 0xd4: # fixext 1
             typ = TYPE_EXT
             if self._max_ext_len < 1:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
-            n, obj = struct.unpack('b1s', self._read(2, write_bytes))
+            n, obj = struct.unpack('b1s', self._read(2))
         elif b == 0xd5: # fixext 2
             typ = TYPE_EXT
             if self._max_ext_len < 2:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
-            n, obj = struct.unpack('b2s', self._read(3, write_bytes))
+            n, obj = struct.unpack('b2s', self._read(3))
         elif b == 0xd6: # fixext 4
             typ = TYPE_EXT
             if self._max_ext_len < 4:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
-            n, obj = struct.unpack('b4s', self._read(5, write_bytes))
+            n, obj = struct.unpack('b4s', self._read(5))
         elif b == 0xd7: # fixext 8
             typ = TYPE_EXT
             if self._max_ext_len < 8:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
-            n, obj = struct.unpack('b8s', self._read(9, write_bytes))
+            n, obj = struct.unpack('b8s', self._read(9))
         elif b == 0xd8: # fixext 16
             typ = TYPE_EXT
             if self._max_ext_len < 16:
                 raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
-            n, obj = struct.unpack('b16s', self._read(17, write_bytes))
+            n, obj = struct.unpack('b16s', self._read(17))
         elif b == 0xd9:
             typ = TYPE_RAW
-            n = struct.unpack("B", self._read(1, write_bytes))[0]
+            n = struct.unpack("B", self._read(1))[0]
             if n > self._max_str_len:
                 raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xda:
             typ = TYPE_RAW
-            n = struct.unpack(">H", self._read(2, write_bytes))[0]
+            n = struct.unpack(">H", self._read(2))[0]
             if n > self._max_str_len:
                 raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xdb:
             typ = TYPE_RAW
-            n = struct.unpack(">I", self._read(4, write_bytes))[0]
+            n = struct.unpack(">I", self._read(4))[0]
             if n > self._max_str_len:
                 raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
-            obj = self._read(n, write_bytes)
+            obj = self._read(n)
         elif b == 0xdc:
-            n = struct.unpack(">H", self._read(2, write_bytes))[0]
+            n = struct.unpack(">H", self._read(2))[0]
             if n > self._max_array_len:
                 raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
             typ = TYPE_ARRAY
         elif b == 0xdd:
-            n = struct.unpack(">I", self._read(4, write_bytes))[0]
+            n = struct.unpack(">I", self._read(4))[0]
             if n > self._max_array_len:
                 raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
             typ = TYPE_ARRAY
         elif b == 0xde:
-            n = struct.unpack(">H", self._read(2, write_bytes))[0]
+            n = struct.unpack(">H", self._read(2))[0]
             if n > self._max_map_len:
                 raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
             typ = TYPE_MAP
         elif b == 0xdf:
-            n = struct.unpack(">I", self._read(4, write_bytes))[0]
+            n = struct.unpack(">I", self._read(4))[0]
             if n > self._max_map_len:
                 raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
             typ = TYPE_MAP
@@ -469,8 +465,8 @@ class Unpacker(object):
             raise UnpackValueError("Unknown header: 0x%x" % b)
         return typ, n, obj

-    def _unpack(self, execute=EX_CONSTRUCT, write_bytes=None):
-        typ, n, obj = self._read_header(execute, write_bytes)
+    def _unpack(self, execute=EX_CONSTRUCT):
+        typ, n, obj = self._read_header(execute)

         if execute == EX_READ_ARRAY_HEADER:
             if typ != TYPE_ARRAY:
@@ -485,11 +481,11 @@ class Unpacker(object):
             if execute == EX_SKIP:
                 for i in xrange(n):
                     # TODO check whether we need to call `list_hook`
-                    self._unpack(EX_SKIP, write_bytes)
+                    self._unpack(EX_SKIP)
                 return
             ret = newlist_hint(n)
             for i in xrange(n):
-                ret.append(self._unpack(EX_CONSTRUCT, write_bytes))
+                ret.append(self._unpack(EX_CONSTRUCT))
             if self._list_hook is not None:
                 ret = self._list_hook(ret)
             # TODO is the interaction between `list_hook` and `use_list` ok?
@@ -498,19 +494,19 @@ class Unpacker(object):
             if execute == EX_SKIP:
                 for i in xrange(n):
                     # TODO check whether we need to call hooks
-                    self._unpack(EX_SKIP, write_bytes)
-                    self._unpack(EX_SKIP, write_bytes)
+                    self._unpack(EX_SKIP)
+                    self._unpack(EX_SKIP)
                 return
             if self._object_pairs_hook is not None:
                 ret = self._object_pairs_hook(
-                    (self._unpack(EX_CONSTRUCT, write_bytes),
-                     self._unpack(EX_CONSTRUCT, write_bytes))
+                    (self._unpack(EX_CONSTRUCT),
+                     self._unpack(EX_CONSTRUCT))
                     for _ in xrange(n))
             else:
                 ret = {}
                 for _ in xrange(n):
-                    key = self._unpack(EX_CONSTRUCT, write_bytes)
-                    ret[key] = self._unpack(EX_CONSTRUCT, write_bytes)
+                    key = self._unpack(EX_CONSTRUCT)
+                    ret[key] = self._unpack(EX_CONSTRUCT)
             if self._object_hook is not None:
                 ret = self._object_hook(ret)
             return ret
@@ -532,7 +528,7 @@ class Unpacker(object):

     def __next__(self):
         try:
-            ret = self._unpack(EX_CONSTRUCT, None)
+            ret = self._unpack(EX_CONSTRUCT)
             self._consume()
             return ret
         except OutOfData:
@@ -542,21 +538,29 @@ class Unpacker(object):
     next = __next__

     def skip(self, write_bytes=None):
-        self._unpack(EX_SKIP, write_bytes)
+        self._unpack(EX_SKIP)
+        if write_bytes is not None:
+            write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
         self._consume()

     def unpack(self, write_bytes=None):
-        ret = self._unpack(EX_CONSTRUCT, write_bytes)
+        ret = self._unpack(EX_CONSTRUCT)
+        if write_bytes is not None:
+            write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
         self._consume()
         return ret

     def read_array_header(self, write_bytes=None):
-        ret = self._unpack(EX_READ_ARRAY_HEADER, write_bytes)
+        ret = self._unpack(EX_READ_ARRAY_HEADER)
+        if write_bytes is not None:
+            write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
         self._consume()
         return ret

     def read_map_header(self, write_bytes=None):
-        ret = self._unpack(EX_READ_MAP_HEADER, write_bytes)
+        ret = self._unpack(EX_READ_MAP_HEADER)
+        if write_bytes is not None:
+            write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
         self._consume()
         return ret
