Mirror of https://github.com/msgpack/msgpack-python.git, synced 2025-10-24 14:23:19 +00:00
Remove deprecated exception classes (#323)
This commit is contained in:
parent 1bf62ba6f8
commit 07f0beeabb

4 changed files with 84 additions and 95 deletions
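What this means for calling code: the packers and unpackers now raise the builtin ValueError / OverflowError directly instead of the msgpack-specific subclasses removed here. A minimal sketch of the intended usage after this change; the payload byte below is only an illustrative truncated message, not taken from the test suite:

    import msgpack

    # A fixarray header that promises one element but supplies none; the
    # unpacker reports this as a plain ValueError ("Data is not enough." /
    # "Unpack failed") rather than msgpack.exceptions.UnpackValueError.
    try:
        msgpack.unpackb(b"\x91")
    except ValueError as e:
        print("bad payload:", e)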
@@ -5,7 +5,6 @@ from cpython cimport *
 from cpython.version cimport PY_MAJOR_VERSION
 from cpython.exc cimport PyErr_WarnEx
 
-from msgpack.exceptions import PackValueError, PackOverflowError
 from msgpack import ExtType
 
 
@@ -165,7 +164,7 @@ cdef class Packer(object):
         cdef Py_buffer view
 
         if nest_limit < 0:
-            raise PackValueError("recursion limit exceeded.")
+            raise ValueError("recursion limit exceeded.")
 
         while True:
             if o is None:
@@ -191,7 +190,7 @@ cdef class Packer(object):
                         default_used = True
                         continue
                     else:
-                        raise PackOverflowError("Integer value out of range")
+                        raise OverflowError("Integer value out of range")
             elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o):
                 longval = o
                 ret = msgpack_pack_long(&self.pk, longval)
@@ -205,7 +204,7 @@ cdef class Packer(object):
             elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o):
                 L = len(o)
                 if L > ITEM_LIMIT:
-                    raise PackValueError("%s is too large" % type(o).__name__)
+                    raise ValueError("%s is too large" % type(o).__name__)
                 rawval = o
                 ret = msgpack_pack_bin(&self.pk, L)
                 if ret == 0:
@@ -214,12 +213,12 @@ cdef class Packer(object):
                 if self.encoding == NULL and self.unicode_errors == NULL:
                     ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT);
                     if ret == -2:
-                        raise PackValueError("unicode string is too large")
+                        raise ValueError("unicode string is too large")
                 else:
                     o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors)
                     L = len(o)
                     if L > ITEM_LIMIT:
-                        raise PackValueError("unicode string is too large")
+                        raise ValueError("unicode string is too large")
                     ret = msgpack_pack_raw(&self.pk, L)
                     if ret == 0:
                         rawval = o
@@ -228,7 +227,7 @@ cdef class Packer(object):
                 d = <dict>o
                 L = len(d)
                 if L > ITEM_LIMIT:
-                    raise PackValueError("dict is too large")
+                    raise ValueError("dict is too large")
                 ret = msgpack_pack_map(&self.pk, L)
                 if ret == 0:
                     for k, v in d.iteritems():
@@ -239,7 +238,7 @@ cdef class Packer(object):
             elif not strict_types and PyDict_Check(o):
                 L = len(o)
                 if L > ITEM_LIMIT:
-                    raise PackValueError("dict is too large")
+                    raise ValueError("dict is too large")
                 ret = msgpack_pack_map(&self.pk, L)
                 if ret == 0:
                     for k, v in o.items():
@@ -253,13 +252,13 @@ cdef class Packer(object):
                 rawval = o.data
                 L = len(o.data)
                 if L > ITEM_LIMIT:
-                    raise PackValueError("EXT data is too large")
+                    raise ValueError("EXT data is too large")
                 ret = msgpack_pack_ext(&self.pk, longval, L)
                 ret = msgpack_pack_raw_body(&self.pk, rawval, L)
             elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)):
                 L = len(o)
                 if L > ITEM_LIMIT:
-                    raise PackValueError("list is too large")
+                    raise ValueError("list is too large")
                 ret = msgpack_pack_array(&self.pk, L)
                 if ret == 0:
                     for v in o:
@@ -267,11 +266,11 @@ cdef class Packer(object):
                         if ret != 0: break
             elif PyMemoryView_Check(o):
                 if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0:
-                    raise PackValueError("could not get buffer for memoryview")
+                    raise ValueError("could not get buffer for memoryview")
                 L = view.len
                 if L > ITEM_LIMIT:
                     PyBuffer_Release(&view);
-                    raise PackValueError("memoryview is too large")
+                    raise ValueError("memoryview is too large")
                 ret = msgpack_pack_bin(&self.pk, L)
                 if ret == 0:
                     ret = msgpack_pack_raw_body(&self.pk, <char*>view.buf, L)
@@ -304,7 +303,7 @@ cdef class Packer(object):
 
    def pack_array_header(self, long long size):
        if size > ITEM_LIMIT:
-            raise PackValueError
+            raise ValueError
        cdef int ret = msgpack_pack_array(&self.pk, size)
        if ret == -1:
            raise MemoryError
@@ -317,7 +316,7 @@ cdef class Packer(object):
 
    def pack_map_header(self, long long size):
        if size > ITEM_LIMIT:
-            raise PackValueError
+            raise ValueError
        cdef int ret = msgpack_pack_map(&self.pk, size)
        if ret == -1:
            raise MemoryError

@@ -35,7 +35,6 @@ ctypedef unsigned long long uint64_t
 from msgpack.exceptions import (
     BufferFull,
     OutOfData,
-    UnpackValueError,
     ExtraData,
 )
 from msgpack import ExtType
@@ -208,7 +207,7 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
             raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off))
         return obj
     unpack_clear(&ctx)
-    raise UnpackValueError("Unpack failed: error = %d" % (ret,))
+    raise ValueError("Unpack failed: error = %d" % (ret,))
 
 
 def unpack(object stream, **kwargs):
@@ -460,7 +459,6 @@ cdef class Unpacker(object):
                 else:
                     raise OutOfData("No more data to unpack.")
 
-            try:
             ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
             self.stream_offset += self.buf_head - prev_head
             if write_bytes is not None:
@@ -479,9 +477,7 @@ cdef class Unpacker(object):
                 else:
                     raise OutOfData("No more data to unpack.")
             else:
-                raise UnpackValueError("Unpack failed: error = %d" % (ret,))
-            except ValueError as e:
-                raise UnpackValueError(e)
+                raise ValueError("Unpack failed: error = %d" % (ret,))
 
     def read_bytes(self, Py_ssize_t nbytes):
         """Read a specified number of raw bytes from the stream"""

@@ -1,6 +1,10 @@
 class UnpackException(Exception):
-    """Deprecated. Use Exception instead to catch all exception during unpacking."""
+    """Base class for some exceptions raised while unpacking.
+
+    NOTE: unpack may raise exception other than subclass of
+    UnpackException. If you want to catch all error, catch
+    Exception instead.
+    """
 
 
 class BufferFull(UnpackException):
     pass
@@ -10,11 +14,16 @@ class OutOfData(UnpackException):
     pass
 
 
-class UnpackValueError(UnpackException, ValueError):
-    """Deprecated. Use ValueError instead."""
+# Deprecated. Use ValueError instead
+UnpackValueError = ValueError
 
 
 class ExtraData(UnpackValueError):
+    """ExtraData is raised when there is trailing data.
+
+    This exception is raised while only one-shot (not streaming)
+    unpack.
+    """
     def __init__(self, unpacked, extra):
         self.unpacked = unpacked
         self.extra = extra
@@ -23,19 +32,7 @@ class ExtraData(UnpackValueError):
         return "unpack(b) received extra data."
 
 
-class PackException(Exception):
-    """Deprecated. Use Exception instead to catch all exception during packing."""
-
-
-class PackValueError(PackException, ValueError):
-    """PackValueError is raised when type of input data is supported but it's value is unsupported.
-
-    Deprecated. Use ValueError instead.
-    """
-
-
-class PackOverflowError(PackValueError, OverflowError):
-    """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32).
-
-    Deprecated. Use ValueError instead.
-    """
+#Deprecated. Use Exception instead to catch all exception during packing.
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError

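Because the removed classes are re-bound as aliases of the builtins, existing except/isinstance checks against the old names keep working. A quick sanity sketch against the post-commit msgpack.exceptions module shown above:

    from msgpack import exceptions

    # The deprecated names still import, but they are now the builtin types.
    assert exceptions.UnpackValueError is ValueError
    assert exceptions.PackValueError is ValueError
    assert exceptions.PackOverflowError is OverflowError
    assert exceptions.PackException is Exception

    # ExtraData now derives from ValueError through the UnpackValueError alias,
    # so trailing-data errors from one-shot unpacking are still ValueErrors.
    assert issubclass(exceptions.ExtraData, ValueError)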
@@ -52,9 +52,6 @@ else:
 from msgpack.exceptions import (
     BufferFull,
     OutOfData,
-    UnpackValueError,
-    PackValueError,
-    PackOverflowError,
     ExtraData)
 
 from msgpack import ExtType
@@ -120,7 +117,7 @@ def unpackb(packed, **kwargs):
     try:
         ret = unpacker._unpack()
     except OutOfData:
-        raise UnpackValueError("Data is not enough.")
+        raise ValueError("Data is not enough.")
     if unpacker._got_extradata():
         raise ExtraData(ret, unpacker._get_extradata())
     return ret
@@ -370,18 +367,18 @@ class Unpacker(object):
             n = b & 0b00011111
             typ = TYPE_RAW
             if n > self._max_str_len:
-                raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+                raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
             obj = self._read(n)
         elif b & 0b11110000 == 0b10010000:
             n = b & 0b00001111
             typ = TYPE_ARRAY
             if n > self._max_array_len:
-                raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+                raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
         elif b & 0b11110000 == 0b10000000:
             n = b & 0b00001111
             typ = TYPE_MAP
             if n > self._max_map_len:
-                raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+                raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
         elif b == 0xc0:
             obj = None
         elif b == 0xc2:
@@ -394,7 +391,7 @@ class Unpacker(object):
             n = self._buffer[self._buff_i]
             self._buff_i += 1
             if n > self._max_bin_len:
-                raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
             obj = self._read(n)
         elif b == 0xc5:
             typ = TYPE_BIN
@@ -402,7 +399,7 @@ class Unpacker(object):
             n = _unpack_from(">H", self._buffer, self._buff_i)[0]
             self._buff_i += 2
             if n > self._max_bin_len:
-                raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
             obj = self._read(n)
         elif b == 0xc6:
             typ = TYPE_BIN
@@ -410,7 +407,7 @@ class Unpacker(object):
             n = _unpack_from(">I", self._buffer, self._buff_i)[0]
             self._buff_i += 4
             if n > self._max_bin_len:
-                raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
             obj = self._read(n)
         elif b == 0xc7:  # ext 8
             typ = TYPE_EXT
@@ -418,7 +415,7 @@ class Unpacker(object):
             L, n = _unpack_from('Bb', self._buffer, self._buff_i)
             self._buff_i += 2
             if L > self._max_ext_len:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
             obj = self._read(L)
         elif b == 0xc8:  # ext 16
             typ = TYPE_EXT
@@ -426,7 +423,7 @@ class Unpacker(object):
             L, n = _unpack_from('>Hb', self._buffer, self._buff_i)
             self._buff_i += 3
             if L > self._max_ext_len:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
             obj = self._read(L)
         elif b == 0xc9:  # ext 32
             typ = TYPE_EXT
@@ -434,7 +431,7 @@ class Unpacker(object):
             L, n = _unpack_from('>Ib', self._buffer, self._buff_i)
             self._buff_i += 5
             if L > self._max_ext_len:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
             obj = self._read(L)
         elif b == 0xca:
             self._reserve(4)
@@ -479,35 +476,35 @@ class Unpacker(object):
         elif b == 0xd4:  # fixext 1
             typ = TYPE_EXT
             if self._max_ext_len < 1:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
             self._reserve(2)
             n, obj = _unpack_from("b1s", self._buffer, self._buff_i)
             self._buff_i += 2
         elif b == 0xd5:  # fixext 2
             typ = TYPE_EXT
             if self._max_ext_len < 2:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
             self._reserve(3)
             n, obj = _unpack_from("b2s", self._buffer, self._buff_i)
             self._buff_i += 3
         elif b == 0xd6:  # fixext 4
             typ = TYPE_EXT
             if self._max_ext_len < 4:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
             self._reserve(5)
             n, obj = _unpack_from("b4s", self._buffer, self._buff_i)
             self._buff_i += 5
         elif b == 0xd7:  # fixext 8
             typ = TYPE_EXT
             if self._max_ext_len < 8:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
             self._reserve(9)
             n, obj = _unpack_from("b8s", self._buffer, self._buff_i)
             self._buff_i += 9
         elif b == 0xd8:  # fixext 16
             typ = TYPE_EXT
             if self._max_ext_len < 16:
-                raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
+                raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
             self._reserve(17)
             n, obj = _unpack_from("b16s", self._buffer, self._buff_i)
             self._buff_i += 17
@@ -517,7 +514,7 @@ class Unpacker(object):
             n = self._buffer[self._buff_i]
             self._buff_i += 1
             if n > self._max_str_len:
-                raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+                raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
             obj = self._read(n)
         elif b == 0xda:
             typ = TYPE_RAW
@@ -525,7 +522,7 @@ class Unpacker(object):
             n, = _unpack_from(">H", self._buffer, self._buff_i)
             self._buff_i += 2
             if n > self._max_str_len:
-                raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+                raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
             obj = self._read(n)
         elif b == 0xdb:
             typ = TYPE_RAW
@@ -533,7 +530,7 @@ class Unpacker(object):
             n, = _unpack_from(">I", self._buffer, self._buff_i)
             self._buff_i += 4
             if n > self._max_str_len:
-                raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+                raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
             obj = self._read(n)
         elif b == 0xdc:
             typ = TYPE_ARRAY
@@ -541,30 +538,30 @@ class Unpacker(object):
             n, = _unpack_from(">H", self._buffer, self._buff_i)
             self._buff_i += 2
             if n > self._max_array_len:
-                raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+                raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
         elif b == 0xdd:
             typ = TYPE_ARRAY
             self._reserve(4)
             n, = _unpack_from(">I", self._buffer, self._buff_i)
             self._buff_i += 4
             if n > self._max_array_len:
-                raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+                raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
         elif b == 0xde:
             self._reserve(2)
             n, = _unpack_from(">H", self._buffer, self._buff_i)
             self._buff_i += 2
             if n > self._max_map_len:
-                raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+                raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
             typ = TYPE_MAP
         elif b == 0xdf:
             self._reserve(4)
             n, = _unpack_from(">I", self._buffer, self._buff_i)
             self._buff_i += 4
             if n > self._max_map_len:
-                raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+                raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
             typ = TYPE_MAP
         else:
-            raise UnpackValueError("Unknown header: 0x%x" % b)
+            raise ValueError("Unknown header: 0x%x" % b)
         return typ, n, obj
 
     def _unpack(self, execute=EX_CONSTRUCT):
@@ -572,11 +569,11 @@ class Unpacker(object):
 
         if execute == EX_READ_ARRAY_HEADER:
             if typ != TYPE_ARRAY:
-                raise UnpackValueError("Expected array")
+                raise ValueError("Expected array")
             return n
         if execute == EX_READ_MAP_HEADER:
             if typ != TYPE_MAP:
-                raise UnpackValueError("Expected map")
+                raise ValueError("Expected map")
             return n
         # TODO should we eliminate the recursion?
         if typ == TYPE_ARRAY:
@@ -754,7 +751,7 @@ class Packer(object):
             list_types = (list, tuple)
         while True:
             if nest_limit < 0:
-                raise PackValueError("recursion limit exceeded")
+                raise ValueError("recursion limit exceeded")
             if obj is None:
                 return self._buffer.write(b"\xc0")
             if check(obj, bool):
@@ -786,11 +783,11 @@ class Packer(object):
                     obj = self._default(obj)
                     default_used = True
                     continue
-                raise PackOverflowError("Integer value out of range")
+                raise OverflowError("Integer value out of range")
             if check(obj, (bytes, bytearray)):
                 n = len(obj)
                 if n >= 2**32:
-                    raise PackValueError("%s is too large" % type(obj).__name__)
+                    raise ValueError("%s is too large" % type(obj).__name__)
                 self._pack_bin_header(n)
                 return self._buffer.write(obj)
             if check(obj, Unicode):
@@ -801,13 +798,13 @@ class Packer(object):
                 obj = obj.encode(self._encoding, self._unicode_errors)
                 n = len(obj)
                 if n >= 2**32:
-                    raise PackValueError("String is too large")
+                    raise ValueError("String is too large")
                 self._pack_raw_header(n)
                 return self._buffer.write(obj)
             if check(obj, memoryview):
                 n = len(obj) * obj.itemsize
                 if n >= 2**32:
-                    raise PackValueError("Memoryview is too large")
+                    raise ValueError("Memoryview is too large")
                 self._pack_bin_header(n)
                 return self._buffer.write(obj)
             if check(obj, float):
@@ -874,7 +871,7 @@ class Packer(object):
 
    def pack_array_header(self, n):
        if n >= 2**32:
-            raise PackValueError
+            raise ValueError
        self._pack_array_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
@@ -883,7 +880,7 @@ class Packer(object):
 
    def pack_map_header(self, n):
        if n >= 2**32:
-            raise PackValueError
+            raise ValueError
        self._pack_map_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
@@ -899,7 +896,7 @@ class Packer(object):
            raise TypeError("data must have bytes type")
        L = len(data)
        if L > 0xffffffff:
-            raise PackValueError("Too large data")
+            raise ValueError("Too large data")
        if L == 1:
            self._buffer.write(b'\xd4')
        elif L == 2:
@@ -926,7 +923,7 @@ class Packer(object):
            return self._buffer.write(struct.pack(">BH", 0xdc, n))
        if n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xdd, n))
-        raise PackValueError("Array is too large")
+        raise ValueError("Array is too large")
 
    def _pack_map_header(self, n):
        if n <= 0x0f:
@@ -935,7 +932,7 @@ class Packer(object):
            return self._buffer.write(struct.pack(">BH", 0xde, n))
        if n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xdf, n))
-        raise PackValueError("Dict is too large")
+        raise ValueError("Dict is too large")
 
    def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
        self._pack_map_header(n)
@@ -953,7 +950,7 @@ class Packer(object):
        elif n <= 0xffffffff:
            self._buffer.write(struct.pack(">BI", 0xdb, n))
        else:
-            raise PackValueError('Raw is too large')
+            raise ValueError('Raw is too large')
 
    def _pack_bin_header(self, n):
        if not self._use_bin_type:
@@ -965,7 +962,7 @@ class Packer(object):
        elif n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xc6, n))
        else:
-            raise PackValueError('Bin is too large')
+            raise ValueError('Bin is too large')
 
    def bytes(self):
        """Return internal buffer contents as bytes object"""
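In the pure-Python Packer above, an integer that falls outside what msgpack can encode now reaches the builtin OverflowError branch, and oversized bytes/str/containers raise ValueError. A hedged sketch of the caller-visible behaviour, assuming no default= hook is installed:

    import msgpack

    # 2**100 cannot be represented as a msgpack integer, so _pack() falls
    # through to the "Integer value out of range" branch shown above.
    try:
        msgpack.packb(2**100)
    except OverflowError as e:   # formerly msgpack.exceptions.PackOverflowError
        print("cannot pack:", e)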