mirror of
https://github.com/msgpack/msgpack-python.git
synced 2026-02-06 17:59:52 +00:00
Compare commits
229 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f9806368ae | ||
|
|
c1ecd23dbf | ||
|
|
af45640970 | ||
|
|
c2546eabc4 | ||
|
|
ef4f83df16 | ||
|
|
19b5d33ded | ||
|
|
0f3c4be465 | ||
|
|
c2a9f1fda5 | ||
|
|
d9873dab04 | ||
|
|
42f056f3cf | ||
|
|
e6445d3b92 | ||
|
|
fe9e620a60 | ||
|
|
cdc7644503 | ||
|
|
868aa2cd83 | ||
|
|
0eeabfb453 | ||
|
|
4587393b1a | ||
|
|
20a2b8eaa2 | ||
|
|
9d0c7f2f9c | ||
|
|
9e26d80ab2 | ||
|
|
6e11368f5d | ||
|
|
0b1c47b06b | ||
|
|
9cea8b6da2 | ||
|
|
33e0e86f4e | ||
|
|
e0f0e145f1 | ||
|
|
e1068087e0 | ||
|
|
3da5818a3a | ||
|
|
72e65feb0e | ||
|
|
bf2413f915 | ||
|
|
a97b31437d | ||
|
|
52f8bc2e55 | ||
|
|
526ec9c923 | ||
|
|
b389ccf2f7 | ||
|
|
3e9a2a7419 | ||
|
|
0602baf3ea | ||
|
|
2eca765533 | ||
|
|
e77672200b | ||
|
|
9aedf8ed7f | ||
|
|
bf7bf88ad0 | ||
|
|
039022cecb | ||
|
|
140864249f | ||
|
|
c78026102c | ||
|
|
2982e9ff72 | ||
|
|
acd0684392 | ||
|
|
ecf03748c7 | ||
|
|
b1b0edaeed | ||
|
|
e1d3d5d5c3 | ||
|
|
4e10c10aaa | ||
|
|
41d6239c0a | ||
|
|
ef15f4a62c | ||
|
|
423c6df265 | ||
|
|
7b75b4f368 | ||
|
|
715126c67b | ||
|
|
7cfced5150 | ||
|
|
427736bbcc | ||
|
|
e5249f877c | ||
|
|
c8d0751fe3 | ||
|
|
feec06206c | ||
|
|
45f848695c | ||
|
|
802cbc9495 | ||
|
|
0516c2c2a9 | ||
|
|
35b2d246cf | ||
|
|
4c55f809fe | ||
|
|
aa9ce3e2bb | ||
|
|
dcb775031c | ||
|
|
e3ef909c47 | ||
|
|
1008229553 | ||
|
|
b82d0b62f1 | ||
|
|
c3995669f1 | ||
|
|
44a8060383 | ||
|
|
edca770071 | ||
|
|
9d45926a59 | ||
|
|
b5acfd5383 | ||
|
|
caadbf2df5 | ||
|
|
a34dc945bf | ||
|
|
63837a44d8 | ||
|
|
500a238028 | ||
|
|
b75e3412fb | ||
|
|
b901b179d1 | ||
|
|
6a721faa77 | ||
|
|
849c806381 | ||
|
|
cb50b2081b | ||
|
|
89ea57747e | ||
|
|
bdf0511e29 | ||
|
|
6129789e9f | ||
|
|
e29b423de7 | ||
|
|
724e6200fd | ||
|
|
e464cb44fa | ||
|
|
cfa05d3fdc | ||
|
|
8e358617e7 | ||
|
|
b3f7254192 | ||
|
|
9b84e490e7 | ||
|
|
09187421eb | ||
|
|
38dba9634e | ||
|
|
010de11bed | ||
|
|
44fd577705 | ||
|
|
4ace82f108 | ||
|
|
38357b928a | ||
|
|
4b0819dca9 | ||
|
|
1e728a2e0b | ||
|
|
cfae52437b | ||
|
|
02e1f7623c | ||
|
|
3b71818bb0 | ||
|
|
431ef45c8e | ||
|
|
c0516c603f | ||
|
|
f34fca7fb5 | ||
|
|
051f9ded1f | ||
|
|
94336cf914 | ||
|
|
753b3706d8 | ||
|
|
8029f95516 | ||
|
|
edd5603661 | ||
|
|
d893697eab | ||
|
|
7d6b4dfb51 | ||
|
|
2df517999b | ||
|
|
44bc2bd439 | ||
|
|
8fb709f2e0 | ||
|
|
772c830841 | ||
|
|
5614dd5a89 | ||
|
|
d9ead81021 | ||
|
|
3508ca524e | ||
|
|
c1b1a23f62 | ||
|
|
b04690012d | ||
|
|
4e10222b51 | ||
|
|
692e0ee8ff | ||
|
|
2bfc2d0566 | ||
|
|
2849f5582a | ||
|
|
12506d8d91 | ||
|
|
fa7d7447fc | ||
|
|
64f59884a1 | ||
|
|
fcb19a0e1a | ||
|
|
cd6561db52 | ||
|
|
f0952f1dd6 | ||
|
|
9d79351e99 | ||
|
|
ff1f5f89d9 | ||
|
|
0dad821169 | ||
|
|
24950990f4 | ||
|
|
1bd6fc36d0 | ||
|
|
030bb2f1f7 | ||
|
|
ebfe55e637 | ||
|
|
42f5ecfd51 | ||
|
|
5e1fe818e3 | ||
|
|
9e5ec95e02 | ||
|
|
887d3a7d22 | ||
|
|
aab29ff277 | ||
|
|
a05fc5e7c5 | ||
|
|
3df431cafd | ||
|
|
c60e6c7a6f | ||
|
|
2186455d15 | ||
|
|
5fd6119093 | ||
|
|
d10f12db8f | ||
|
|
c356035a57 | ||
|
|
5399f8180d | ||
|
|
d8e3cf0563 | ||
|
|
0fc0eb2f16 | ||
|
|
5ba496c79a | ||
|
|
f6f6f328eb | ||
|
|
7a8ce0f9ca | ||
|
|
235c6036ea | ||
|
|
7e9905bdfa | ||
|
|
de320488ae | ||
|
|
9f4b2d53b7 | ||
|
|
9ae43709e4 | ||
|
|
af4eea430e | ||
|
|
bc8c86203a | ||
|
|
10e5e39ff9 | ||
|
|
e557e17cbd | ||
|
|
641406902e | ||
|
|
2c6668941f | ||
|
|
e419cd8e2d | ||
|
|
83ebb63c44 | ||
|
|
a0480c7602 | ||
|
|
e1ed0044bf | ||
|
|
cc3a8665d6 | ||
|
|
891f2d8743 | ||
|
|
b458e9a6a2 | ||
|
|
997b524f06 | ||
|
|
144f276e88 | ||
|
|
fd3f004863 | ||
|
|
c25e2a0984 | ||
|
|
3146ebd330 | ||
|
|
b98b8cab99 | ||
|
|
05ff11dbcc | ||
|
|
737f08a885 | ||
|
|
381c2eff5f | ||
|
|
8f513af999 | ||
|
|
280308e8ce | ||
|
|
9951b89455 | ||
|
|
464fe277e1 | ||
|
|
28b5f46a34 | ||
|
|
f46523b1af | ||
|
|
197e30723a | ||
|
|
b8bf3c950c | ||
|
|
b1d658e7a0 | ||
|
|
cc7fd5722b | ||
|
|
bbdfd4d92e | ||
|
|
93b5953eae | ||
|
|
04cf8fc7f4 | ||
|
|
760e30b77e | ||
|
|
8ae6320072 | ||
|
|
ab789813b8 | ||
|
|
e76091a82c | ||
|
|
dc1b993079 | ||
|
|
e9086a34e4 | ||
|
|
3c9c6edbc8 | ||
|
|
ab2415eaa0 | ||
|
|
44254dd35e | ||
|
|
8b6ce53cce | ||
|
|
2f808b6e01 | ||
|
|
d782464c91 | ||
|
|
2b5f59166b | ||
|
|
39f8aa78c7 | ||
|
|
07f0beeabb | ||
|
|
1bf62ba6f8 | ||
|
|
9e210bfc1a | ||
|
|
a8b3e97fe5 | ||
|
|
3b80233592 | ||
|
|
ae90b26c30 | ||
|
|
08e65bdd03 | ||
|
|
9d11249d89 | ||
|
|
6c8e539eec | ||
|
|
f6f9597249 | ||
|
|
91ec9e1daf | ||
|
|
b077a21f89 | ||
|
|
205f7d39b2 | ||
|
|
70b5f21b34 | ||
|
|
d1060de293 | ||
|
|
aa41e2fef7 | ||
|
|
5f684aed82 | ||
|
|
b10cf78f54 | ||
|
|
984116bd18 |
63 changed files with 3200 additions and 2543 deletions
33
.github/workflows/docs.yaml
vendored
Normal file
33
.github/workflows/docs.yaml
vendored
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
name: docs
|
||||||
|
|
||||||
|
on: ["push", "pull_request"]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
docs:
|
||||||
|
# We want to run on external PRs, but not on our own internal PRs as they'll be run
|
||||||
|
# by the push to the branch.
|
||||||
|
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.x'
|
||||||
|
cache: "pip"
|
||||||
|
cache-dependency-path: |
|
||||||
|
requirements.txt
|
||||||
|
docs/requirements.txt
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
pip install -r requirements.txt
|
||||||
|
make cython
|
||||||
|
|
||||||
|
- name: Sphinx Documentation Generator
|
||||||
|
run: |
|
||||||
|
pip install -r docs/requirements.txt
|
||||||
|
make docs
|
||||||
22
.github/workflows/lint.yaml
vendored
Normal file
22
.github/workflows/lint.yaml
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
name: lint
|
||||||
|
|
||||||
|
on: ["push", "pull_request"]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
# We want to run on external PRs, but not on our own internal PRs as they'll be run
|
||||||
|
# by the push to the branch.
|
||||||
|
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: ruff check
|
||||||
|
run: |
|
||||||
|
pipx run ruff check --diff msgpack/ test/ setup.py
|
||||||
|
|
||||||
|
- name: ruff format
|
||||||
|
run: |
|
||||||
|
pipx run ruff format --diff msgpack/ test/ setup.py
|
||||||
61
.github/workflows/test.yml
vendored
Normal file
61
.github/workflows/test.yml
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
||||||
|
name: Run tests
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
pull_request:
|
||||||
|
create:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
os: ["ubuntu-latest", "windows-latest", "windows-11-arm", "macos-latest"]
|
||||||
|
py: ["3.14", "3.14t", "3.13", "3.12", "3.11", "3.10"]
|
||||||
|
exclude:
|
||||||
|
- os: windows-11-arm
|
||||||
|
py: "3.10"
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
name: Run test with Python ${{ matrix.py }} on ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v5
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.py }}
|
||||||
|
allow-prereleases: true
|
||||||
|
cache: "pip"
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m pip install -r requirements.txt pytest
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
make cython
|
||||||
|
pip install .
|
||||||
|
|
||||||
|
- name: Test (C extension)
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
pytest -v test
|
||||||
|
|
||||||
|
- name: Test (pure Python fallback)
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
MSGPACK_PUREPYTHON=1 pytest -v test
|
||||||
|
|
||||||
|
- name: build packages
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
python -m build -nv
|
||||||
|
|
||||||
|
- name: upload packages
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: dist-${{ matrix.os }}-${{ matrix.py }}
|
||||||
|
path: dist
|
||||||
88
.github/workflows/wheel.yml
vendored
Normal file
88
.github/workflows/wheel.yml
vendored
Normal file
|
|
@ -0,0 +1,88 @@
|
||||||
|
name: Build sdist and Wheels
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
release:
|
||||||
|
types:
|
||||||
|
- published
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build_wheels:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
# macos-13 is for intel
|
||||||
|
os: ["ubuntu-24.04", "ubuntu-24.04-arm", "windows-latest", "windows-11-arm", "macos-13", "macos-latest"]
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
name: Build wheels on ${{ matrix.os }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: "3.x"
|
||||||
|
cache: "pip"
|
||||||
|
- name: Cythonize
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
pip install -r requirements.txt
|
||||||
|
make cython
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
uses: pypa/cibuildwheel@v3.3.0
|
||||||
|
env:
|
||||||
|
CIBW_TEST_REQUIRES: "pytest"
|
||||||
|
CIBW_TEST_COMMAND: "pytest {package}/test"
|
||||||
|
CIBW_SKIP: "pp* cp38-* cp39-* cp310-win_arm64"
|
||||||
|
|
||||||
|
- name: Build sdist
|
||||||
|
if: runner.os == 'Linux' && runner.arch == 'X64'
|
||||||
|
run: |
|
||||||
|
pip install build
|
||||||
|
python -m build -s -o wheelhouse
|
||||||
|
|
||||||
|
- name: Upload Wheels to artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wheels-${{ matrix.os }}
|
||||||
|
path: wheelhouse
|
||||||
|
|
||||||
|
# combine all wheels into one artifact
|
||||||
|
combine_wheels:
|
||||||
|
needs: [build_wheels]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
# unpacks all CIBW artifacts into dist/
|
||||||
|
pattern: wheels-*
|
||||||
|
path: dist
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Upload Wheels to artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: wheels-all
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
# https://github.com/pypa/cibuildwheel/blob/main/examples/github-deploy.yml
|
||||||
|
upload_pypi:
|
||||||
|
needs: [build_wheels]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
environment: pypi
|
||||||
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
if: github.event_name == 'release' && github.event.action == 'published'
|
||||||
|
# or, alternatively, upload to PyPI on every tag starting with 'v' (remove on: release above to use this)
|
||||||
|
# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
# unpacks all CIBW artifacts into dist/
|
||||||
|
pattern: wheels-*
|
||||||
|
path: dist
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- uses: pypa/gh-action-pypi-publish@release/v1
|
||||||
|
#with:
|
||||||
|
# To test: repository-url: https://test.pypi.org/legacy/
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -2,11 +2,13 @@ MANIFEST
|
||||||
build/*
|
build/*
|
||||||
dist/*
|
dist/*
|
||||||
.tox
|
.tox
|
||||||
|
.python-version
|
||||||
*.pyc
|
*.pyc
|
||||||
*.pyo
|
*.pyo
|
||||||
*.so
|
*.so
|
||||||
*~
|
*~
|
||||||
msgpack/__version__.py
|
msgpack/__version__.py
|
||||||
|
msgpack/*.c
|
||||||
msgpack/*.cpp
|
msgpack/*.cpp
|
||||||
*.egg-info
|
*.egg-info
|
||||||
/venv
|
/venv
|
||||||
|
|
|
||||||
24
.readthedocs.yaml
Normal file
24
.readthedocs.yaml
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
# Read the Docs configuration file for Sphinx projects.
|
||||||
|
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details.
|
||||||
|
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
build:
|
||||||
|
os: ubuntu-22.04
|
||||||
|
tools:
|
||||||
|
python: "3.11"
|
||||||
|
apt_packages:
|
||||||
|
- build-essential
|
||||||
|
jobs:
|
||||||
|
pre_install:
|
||||||
|
- pip install -r requirements.txt
|
||||||
|
- make cython
|
||||||
|
|
||||||
|
python:
|
||||||
|
install:
|
||||||
|
- method: pip
|
||||||
|
path: .
|
||||||
|
- requirements: docs/requirements.txt
|
||||||
|
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
45
.travis.yml
45
.travis.yml
|
|
@ -1,45 +0,0 @@
|
||||||
sudo: false
|
|
||||||
language: python
|
|
||||||
cache: pip
|
|
||||||
|
|
||||||
python:
|
|
||||||
- "2.7"
|
|
||||||
- "3.5"
|
|
||||||
- "3.6"
|
|
||||||
- "3.7-dev"
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- sudo: required
|
|
||||||
language: python
|
|
||||||
services:
|
|
||||||
- docker
|
|
||||||
env:
|
|
||||||
- DOCKER_IMAGE=quay.io/pypa/manylinux1_i686
|
|
||||||
install:
|
|
||||||
- pip install -U pip
|
|
||||||
- pip install cython
|
|
||||||
- cython --cplus msgpack/_packer.pyx msgpack/_unpacker.pyx
|
|
||||||
- docker pull $DOCKER_IMAGE
|
|
||||||
script:
|
|
||||||
- docker run --rm -v `pwd`:/io -w /io $DOCKER_IMAGE /io/docker/runtests.sh
|
|
||||||
- python: "pypy"
|
|
||||||
install:
|
|
||||||
- pip install -e .
|
|
||||||
script:
|
|
||||||
- py.test -v test
|
|
||||||
|
|
||||||
|
|
||||||
install:
|
|
||||||
- pip install -U pip
|
|
||||||
- pip install cython
|
|
||||||
- cython --cplus msgpack/_packer.pyx msgpack/_unpacker.pyx
|
|
||||||
- pip install -e .
|
|
||||||
|
|
||||||
script:
|
|
||||||
- python -c 'import sys; print(hex(sys.maxsize))'
|
|
||||||
- python -c 'from msgpack import _packer, _unpacker'
|
|
||||||
- py.test -v test
|
|
||||||
- MSGPACK_PUREPYTHON=x py.test -v test
|
|
||||||
|
|
||||||
# vim: sw=2 ts=2
|
|
||||||
220
ChangeLog.rst
220
ChangeLog.rst
|
|
@ -1,3 +1,223 @@
|
||||||
|
1.1.2
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2025-10-08
|
||||||
|
|
||||||
|
This release does not change source code. It updates only building wheels:
|
||||||
|
|
||||||
|
* Update Cython to v3.1.4
|
||||||
|
* Update cibuildwheel to v3.2.0
|
||||||
|
* Drop Python 3.8
|
||||||
|
* Add Python 3.14
|
||||||
|
* Add windows-arm
|
||||||
|
|
||||||
|
1.1.1
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2025-06-13
|
||||||
|
|
||||||
|
* No change from 1.1.1rc1.
|
||||||
|
|
||||||
|
1.1.1rc1
|
||||||
|
========
|
||||||
|
|
||||||
|
Release Date: 2025-06-06
|
||||||
|
|
||||||
|
* Update Cython to 3.1.1 and cibuildwheel to 2.23.3.
|
||||||
|
|
||||||
|
1.1.0
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2024-09-10
|
||||||
|
|
||||||
|
* use ``PyLong_*`` instead of ``PyInt_*`` for compatibility with
|
||||||
|
future Cython. (#620)
|
||||||
|
|
||||||
|
1.1.0rc2
|
||||||
|
========
|
||||||
|
|
||||||
|
Release Date: 2024-08-19
|
||||||
|
|
||||||
|
* Update Cython to 3.0.11 for better Python 3.13 support.
|
||||||
|
* Update cibuildwheel to 2.20.0 to build Python 3.13 wheels.
|
||||||
|
|
||||||
|
1.1.0rc1
|
||||||
|
========
|
||||||
|
|
||||||
|
Release Date: 2024-05-07
|
||||||
|
|
||||||
|
* Update Cython to 3.0.10 to reduce C warnings and future support for Python 3.13.
|
||||||
|
* Stop using C++ mode in Cython to reduce compile error on some compilers.
|
||||||
|
* ``Packer()`` has ``buf_size`` option to specify initial size of
|
||||||
|
internal buffer to reduce reallocation.
|
||||||
|
* The default internal buffer size of ``Packer()`` is reduced from
|
||||||
|
1MiB to 256KiB to optimize for common use cases. Use ``buf_size``
|
||||||
|
if you are packing large data.
|
||||||
|
* ``Timestamp.to_datetime()`` and ``Timestamp.from_datetime()`` become
|
||||||
|
more accurate by avoiding floating point calculations. (#591)
|
||||||
|
* The Cython code for ``Unpacker`` has been slightly rewritten for maintainability.
|
||||||
|
* The fallback implementation of ``Packer()`` and ``Unpacker()`` now uses keyword-only
|
||||||
|
arguments to improve compatibility with the Cython implementation.
|
||||||
|
|
||||||
|
1.0.8
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2024-03-01
|
||||||
|
|
||||||
|
* Update Cython to 3.0.8. This fixes memory leak when iterating
|
||||||
|
``Unpacker`` object on Python 3.12.
|
||||||
|
* Do not include C/Cython files in binary wheels.
|
||||||
|
|
||||||
|
|
||||||
|
1.0.7
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2023-09-28
|
||||||
|
|
||||||
|
* Fix build error of extension module on Windows. (#567)
|
||||||
|
* ``setup.py`` doesn't skip build error of extension module. (#568)
|
||||||
|
|
||||||
|
|
||||||
|
1.0.6
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2023-09-21
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
v1.0.6 Wheels for Windows don't contain extension module.
|
||||||
|
Please upgrade to v1.0.7 or newer.
|
||||||
|
|
||||||
|
* Add Python 3.12 wheels (#517)
|
||||||
|
* Remove Python 2.7, 3.6, and 3.7 support
|
||||||
|
|
||||||
|
|
||||||
|
1.0.5
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2023-03-08
|
||||||
|
|
||||||
|
* Use ``__BYTE_ORDER__`` instead of ``__BYTE_ORDER`` for portability. (#513, #514)
|
||||||
|
* Add Python 3.11 wheels (#517)
|
||||||
|
* fallback: Fix packing multidimensional memoryview (#527)
|
||||||
|
|
||||||
|
1.0.4
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2022-06-03
|
||||||
|
|
||||||
|
* Support Python 3.11 (beta).
|
||||||
|
* Don't define `__*_ENDIAN__` macro on Unix. by @methane in https://github.com/msgpack/msgpack-python/pull/495
|
||||||
|
* Use PyFloat_Pack8() on Python 3.11a7 by @vstinner in https://github.com/msgpack/msgpack-python/pull/499
|
||||||
|
* Fix Unpacker max_buffer_length handling by @methane in https://github.com/msgpack/msgpack-python/pull/506
|
||||||
|
|
||||||
|
1.0.3
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2021-11-24 JST
|
||||||
|
|
||||||
|
* Fix Docstring (#459)
|
||||||
|
* Fix error formatting (#463)
|
||||||
|
* Improve error message about strict_map_key (#485)
|
||||||
|
|
||||||
|
1.0.2
|
||||||
|
=====
|
||||||
|
|
||||||
|
* Fix year 2038 problem regression in 1.0.1. (#451)
|
||||||
|
|
||||||
|
1.0.1
|
||||||
|
=====
|
||||||
|
|
||||||
|
* Add Python 3.9 and linux/arm64 wheels. (#439)
|
||||||
|
* Fixed Unpacker.tell() after read_bytes() (#426)
|
||||||
|
* Fixed unpacking datetime before epoch on Windows (#433)
|
||||||
|
* Fixed fallback Packer didn't check DateTime.tzinfo (#434)
|
||||||
|
|
||||||
|
1.0.0
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2020-02-17
|
||||||
|
|
||||||
|
* Remove Python 2 support from the ``msgpack/_cmsgpack``.
|
||||||
|
``msgpack/fallback`` still supports Python 2.
|
||||||
|
* Remove ``encoding`` option from the Packer and Unpacker.
|
||||||
|
* Unpacker: The default value of ``max_buffer_size`` is changed to 100MiB.
|
||||||
|
* Unpacker: ``strict_map_key`` is True by default now.
|
||||||
|
* Unpacker: String map keys are interned.
|
||||||
|
* Drop old buffer protocol support.
|
||||||
|
* Support Timestamp type.
|
||||||
|
* Support serializing and decerializing ``datetime`` object
|
||||||
|
with tzinfo.
|
||||||
|
* Unpacker: ``Fix Unpacker.read_bytes()`` in fallback implementation. (#352)
|
||||||
|
|
||||||
|
|
||||||
|
0.6.2
|
||||||
|
=====
|
||||||
|
|
||||||
|
Release Date: 2019-09-20
|
||||||
|
|
||||||
|
* Support Python 3.8.
|
||||||
|
* Update Cython to 0.29.13 for support Python 3.8.
|
||||||
|
* Some small optimizations.
|
||||||
|
|
||||||
|
|
||||||
|
0.6.1
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Date: 2019-01-25
|
||||||
|
|
||||||
|
This release is for mitigating pain caused by v0.6.0 reduced max input limits
|
||||||
|
for security reason.
|
||||||
|
|
||||||
|
* ``unpackb(data)`` configures ``max_*_len`` options from ``len(data)``,
|
||||||
|
instead of static default sizes.
|
||||||
|
|
||||||
|
* ``Unpacker(max_buffer_len=N)`` configures ``max_*_len`` options from ``N``,
|
||||||
|
instead of static default sizes.
|
||||||
|
|
||||||
|
* ``max_bin_len``, ``max_str_len``, and ``max_ext_len`` are deprecated.
|
||||||
|
Since this is minor release, it's document only deprecation.
|
||||||
|
|
||||||
|
|
||||||
|
0.6.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Date: 2018-11-30
|
||||||
|
|
||||||
|
This release contains some backward incompatible changes for security reason (DoS).
|
||||||
|
|
||||||
|
Important changes
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
* unpacker: Default value of input limits are smaller than before to avoid DoS attack.
|
||||||
|
If you need to handle large data, you need to specify limits manually. (#319)
|
||||||
|
|
||||||
|
* Unpacker doesn't wrap underlying ``ValueError`` (including ``UnicodeError``) into
|
||||||
|
``UnpackValueError``. If you want to catch all exception during unpack, you need
|
||||||
|
to use ``try ... except Exception`` with minimum try code block. (#323, #233)
|
||||||
|
|
||||||
|
* ``PackValueError`` and ``PackOverflowError`` are also removed. You need to catch
|
||||||
|
normal ``ValueError`` and ``OverflowError``. (#323, #233)
|
||||||
|
|
||||||
|
* Unpacker has ``strict_map_key`` option now. When it is true, only bytes and str
|
||||||
|
(unicode in Python 2) are allowed for map keys. It is recommended to avoid
|
||||||
|
hashdos. Default value of this option is False for backward compatibility reason.
|
||||||
|
But it will be changed True in 1.0. (#296, #334)
|
||||||
|
|
||||||
|
Other changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
* Extension modules are merged. There is ``msgpack._cmsgpack`` instead of
|
||||||
|
``msgpack._packer`` and ``msgpack._unpacker``. (#314, #328)
|
||||||
|
|
||||||
|
* Add ``Unpacker.getbuffer()`` method. (#320)
|
||||||
|
|
||||||
|
* unpacker: ``msgpack.StackError`` is raised when input data contains too
|
||||||
|
nested data. (#331)
|
||||||
|
|
||||||
|
* unpacker: ``msgpack.FormatError`` is raised when input data is not valid
|
||||||
|
msgpack format. (#331)
|
||||||
|
|
||||||
|
|
||||||
0.5.6
|
0.5.6
|
||||||
======
|
======
|
||||||
|
|
||||||
|
|
|
||||||
17
DEVELOP.md
Normal file
17
DEVELOP.md
Normal file
|
|
@ -0,0 +1,17 @@
|
||||||
|
# Developer's note
|
||||||
|
|
||||||
|
### Build
|
||||||
|
|
||||||
|
```
|
||||||
|
$ make cython
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### Test
|
||||||
|
|
||||||
|
MessagePack uses `pytest` for testing.
|
||||||
|
Run test with following command:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ make test
|
||||||
|
```
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
include setup.py
|
include setup.py
|
||||||
include COPYING
|
include COPYING
|
||||||
include README.rst
|
include README.md
|
||||||
recursive-include msgpack *.h *.c *.pyx *.cpp
|
recursive-include msgpack *.h *.c *.pyx
|
||||||
recursive-include test *.py
|
recursive-include test *.py
|
||||||
|
|
|
||||||
41
Makefile
41
Makefile
|
|
@ -1,13 +1,32 @@
|
||||||
|
PYTHON_SOURCES = msgpack test setup.py
|
||||||
|
|
||||||
.PHONY: all
|
.PHONY: all
|
||||||
all: cython
|
all: cython
|
||||||
python setup.py build_ext -i -f
|
python setup.py build_ext -i -f
|
||||||
|
|
||||||
|
.PHONY: format
|
||||||
|
format:
|
||||||
|
ruff format $(PYTHON_SOURCES)
|
||||||
|
|
||||||
|
.PHONY: lint
|
||||||
|
lint:
|
||||||
|
ruff check $(PYTHON_SOURCES)
|
||||||
|
|
||||||
|
.PHONY: doc
|
||||||
|
doc:
|
||||||
|
cd docs && sphinx-build -n -v -W --keep-going -b html -d doctrees . html
|
||||||
|
|
||||||
|
.PHONY: pyupgrade
|
||||||
|
pyupgrade:
|
||||||
|
@find $(PYTHON_SOURCES) -name '*.py' -type f -exec pyupgrade --py37-plus '{}' \;
|
||||||
|
|
||||||
.PHONY: cython
|
.PHONY: cython
|
||||||
cython:
|
cython:
|
||||||
cython --cplus msgpack/*.pyx
|
cython msgpack/_cmsgpack.pyx
|
||||||
|
|
||||||
.PHONY: test
|
.PHONY: test
|
||||||
test:
|
test: cython
|
||||||
|
pip install -e .
|
||||||
pytest -v test
|
pytest -v test
|
||||||
MSGPACK_PUREPYTHON=1 pytest -v test
|
MSGPACK_PUREPYTHON=1 pytest -v test
|
||||||
|
|
||||||
|
|
@ -18,17 +37,23 @@ serve-doc: all
|
||||||
.PHONY: clean
|
.PHONY: clean
|
||||||
clean:
|
clean:
|
||||||
rm -rf build
|
rm -rf build
|
||||||
rm -f msgpack/_packer.cpp
|
rm -f msgpack/_cmsgpack.cpp
|
||||||
rm -f msgpack/_unpacker.cpp
|
rm -f msgpack/_cmsgpack.*.so
|
||||||
|
rm -f msgpack/_cmsgpack.*.pyd
|
||||||
rm -rf msgpack/__pycache__
|
rm -rf msgpack/__pycache__
|
||||||
rm -rf test/__pycache__
|
rm -rf test/__pycache__
|
||||||
|
|
||||||
.PHONY: update-docker
|
.PHONY: update-docker
|
||||||
update-docker:
|
update-docker:
|
||||||
docker pull quay.io/pypa/manylinux1_i686
|
docker pull quay.io/pypa/manylinux2014_i686
|
||||||
docker pull quay.io/pypa/manylinux1_x86_64
|
docker pull quay.io/pypa/manylinux2014_x86_64
|
||||||
|
docker pull quay.io/pypa/manylinux2014_aarch64
|
||||||
|
|
||||||
.PHONY: linux-wheel
|
.PHONY: linux-wheel
|
||||||
linux-wheel:
|
linux-wheel:
|
||||||
docker run --rm -ti -v `pwd`:/project -w /project quay.io/pypa/manylinux1_i686 bash docker/buildwheel.sh
|
docker run --rm -v `pwd`:/project -w /project quay.io/pypa/manylinux2014_i686 bash docker/buildwheel.sh
|
||||||
docker run --rm -ti -v `pwd`:/project -w /project quay.io/pypa/manylinux1_x86_64 bash docker/buildwheel.sh
|
docker run --rm -v `pwd`:/project -w /project quay.io/pypa/manylinux2014_x86_64 bash docker/buildwheel.sh
|
||||||
|
|
||||||
|
.PHONY: linux-arm64-wheel
|
||||||
|
linux-arm64-wheel:
|
||||||
|
docker run --rm -v `pwd`:/project -w /project quay.io/pypa/manylinux2014_aarch64 bash docker/buildwheel.sh
|
||||||
|
|
|
||||||
242
README.md
Normal file
242
README.md
Normal file
|
|
@ -0,0 +1,242 @@
|
||||||
|
# MessagePack for Python
|
||||||
|
|
||||||
|
[](https://github.com/msgpack/msgpack-python/actions/workflows/wheel.yml)
|
||||||
|
[](https://msgpack-python.readthedocs.io/en/latest/?badge=latest)
|
||||||
|
|
||||||
|
## What is this?
|
||||||
|
|
||||||
|
[MessagePack](https://msgpack.org/) is an efficient binary serialization format.
|
||||||
|
It lets you exchange data among multiple languages like JSON.
|
||||||
|
But it's faster and smaller.
|
||||||
|
This package provides CPython bindings for reading and writing MessagePack data.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ pip install msgpack
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pure Python implementation
|
||||||
|
|
||||||
|
The extension module in msgpack (`msgpack._cmsgpack`) does not support PyPy.
|
||||||
|
|
||||||
|
But msgpack provides a pure Python implementation (`msgpack.fallback`) for PyPy.
|
||||||
|
|
||||||
|
|
||||||
|
### Windows
|
||||||
|
|
||||||
|
If you can't use a binary distribution, you need to install Visual Studio
|
||||||
|
or the Windows SDK on Windows.
|
||||||
|
Without the extension, the pure Python implementation on CPython runs slowly.
|
||||||
|
|
||||||
|
|
||||||
|
## How to use
|
||||||
|
|
||||||
|
### One-shot pack & unpack
|
||||||
|
|
||||||
|
Use `packb` for packing and `unpackb` for unpacking.
|
||||||
|
msgpack provides `dumps` and `loads` as aliases for compatibility with
|
||||||
|
`json` and `pickle`.
|
||||||
|
|
||||||
|
`pack` and `dump` pack to a file-like object.
|
||||||
|
`unpack` and `load` unpack from a file-like object.
|
||||||
|
|
||||||
|
```pycon
|
||||||
|
>>> import msgpack
|
||||||
|
>>> msgpack.packb([1, 2, 3])
|
||||||
|
'\x93\x01\x02\x03'
|
||||||
|
>>> msgpack.unpackb(_)
|
||||||
|
[1, 2, 3]
|
||||||
|
```
|
||||||
|
|
||||||
|
Read the docstring for options.
|
||||||
|
|
||||||
|
|
||||||
|
### Streaming unpacking
|
||||||
|
|
||||||
|
`Unpacker` is a "streaming unpacker". It unpacks multiple objects from one
|
||||||
|
stream (or from bytes provided through its `feed` method).
|
||||||
|
|
||||||
|
```py
|
||||||
|
import msgpack
|
||||||
|
from io import BytesIO
|
||||||
|
|
||||||
|
buf = BytesIO()
|
||||||
|
for i in range(100):
|
||||||
|
buf.write(msgpack.packb(i))
|
||||||
|
|
||||||
|
buf.seek(0)
|
||||||
|
|
||||||
|
unpacker = msgpack.Unpacker(buf)
|
||||||
|
for unpacked in unpacker:
|
||||||
|
print(unpacked)
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### Packing/unpacking of custom data types
|
||||||
|
|
||||||
|
It is also possible to pack/unpack custom data types. Here is an example for
|
||||||
|
`datetime.datetime`.
|
||||||
|
|
||||||
|
```py
|
||||||
|
import datetime
|
||||||
|
import msgpack
|
||||||
|
|
||||||
|
useful_dict = {
|
||||||
|
"id": 1,
|
||||||
|
"created": datetime.datetime.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
def decode_datetime(obj):
|
||||||
|
if '__datetime__' in obj:
|
||||||
|
obj = datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
|
||||||
|
return obj
|
||||||
|
|
||||||
|
def encode_datetime(obj):
|
||||||
|
if isinstance(obj, datetime.datetime):
|
||||||
|
return {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f")}
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
packed_dict = msgpack.packb(useful_dict, default=encode_datetime)
|
||||||
|
this_dict_again = msgpack.unpackb(packed_dict, object_hook=decode_datetime)
|
||||||
|
```
|
||||||
|
|
||||||
|
`Unpacker`'s `object_hook` callback receives a dict; the
|
||||||
|
`object_pairs_hook` callback may instead be used to receive a list of
|
||||||
|
key-value pairs.
|
||||||
|
|
||||||
|
NOTE: msgpack can now encode datetime with tzinfo into the standard ext type.
|
||||||
|
See `datetime` option in `Packer` docstring.
|
||||||
|
|
||||||
|
|
||||||
|
### Extended types
|
||||||
|
|
||||||
|
It is also possible to pack/unpack custom data types using the **ext** type.
|
||||||
|
|
||||||
|
```pycon
|
||||||
|
>>> import msgpack
|
||||||
|
>>> import array
|
||||||
|
>>> def default(obj):
|
||||||
|
... if isinstance(obj, array.array) and obj.typecode == 'd':
|
||||||
|
... return msgpack.ExtType(42, obj.tostring())
|
||||||
|
... raise TypeError("Unknown type: %r" % (obj,))
|
||||||
|
...
|
||||||
|
>>> def ext_hook(code, data):
|
||||||
|
... if code == 42:
|
||||||
|
... a = array.array('d')
|
||||||
|
... a.fromstring(data)
|
||||||
|
... return a
|
||||||
|
... return ExtType(code, data)
|
||||||
|
...
|
||||||
|
>>> data = array.array('d', [1.2, 3.4])
|
||||||
|
>>> packed = msgpack.packb(data, default=default)
|
||||||
|
>>> unpacked = msgpack.unpackb(packed, ext_hook=ext_hook)
|
||||||
|
>>> data == unpacked
|
||||||
|
True
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### Advanced unpacking control
|
||||||
|
|
||||||
|
As an alternative to iteration, `Unpacker` objects provide `unpack`,
|
||||||
|
`skip`, `read_array_header`, and `read_map_header` methods. The former two
|
||||||
|
read an entire message from the stream, respectively deserializing and returning
|
||||||
|
the result, or ignoring it. The latter two methods return the number of elements
|
||||||
|
in the upcoming container, so that each element in an array, or key-value pair
|
||||||
|
in a map, can be unpacked or skipped individually.
|
||||||
|
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
### String and binary types in the old MessagePack spec
|
||||||
|
|
||||||
|
Early versions of msgpack didn't distinguish string and binary types.
|
||||||
|
The type for representing both string and binary types was named **raw**.
|
||||||
|
|
||||||
|
You can pack into and unpack from this old spec using `use_bin_type=False`
|
||||||
|
and `raw=True` options.
|
||||||
|
|
||||||
|
```pycon
|
||||||
|
>>> import msgpack
|
||||||
|
>>> msgpack.unpackb(msgpack.packb([b'spam', 'eggs'], use_bin_type=False), raw=True)
|
||||||
|
[b'spam', b'eggs']
|
||||||
|
>>> msgpack.unpackb(msgpack.packb([b'spam', 'eggs'], use_bin_type=True), raw=False)
|
||||||
|
[b'spam', 'eggs']
|
||||||
|
```
|
||||||
|
|
||||||
|
### ext type
|
||||||
|
|
||||||
|
To use the **ext** type, pass a `msgpack.ExtType` object to the packer.
|
||||||
|
|
||||||
|
```pycon
|
||||||
|
>>> import msgpack
|
||||||
|
>>> packed = msgpack.packb(msgpack.ExtType(42, b'xyzzy'))
|
||||||
|
>>> msgpack.unpackb(packed)
|
||||||
|
ExtType(code=42, data='xyzzy')
|
||||||
|
```
|
||||||
|
|
||||||
|
You can use it with `default` and `ext_hook`. See below.
|
||||||
|
|
||||||
|
|
||||||
|
### Security
|
||||||
|
|
||||||
|
When unpacking data received from an unreliable source, msgpack provides
|
||||||
|
two security options.
|
||||||
|
|
||||||
|
`max_buffer_size` (default: `100*1024*1024`) limits the internal buffer size.
|
||||||
|
It is also used to limit preallocated list sizes.
|
||||||
|
|
||||||
|
`strict_map_key` (default: `True`) limits the type of map keys to bytes and str.
|
||||||
|
While the MessagePack spec doesn't limit map key types,
|
||||||
|
there is a risk of a hash DoS.
|
||||||
|
If you need to support other types for map keys, use `strict_map_key=False`.
|
||||||
|
|
||||||
|
|
||||||
|
### Performance tips
|
||||||
|
|
||||||
|
CPython's GC starts when the number of allocated objects grows.
|
||||||
|
This means unpacking may trigger unnecessary GC.
|
||||||
|
You can use `gc.disable()` when unpacking a large message.
|
||||||
|
|
||||||
|
A list is the default sequence type in Python.
|
||||||
|
However, a tuple is lighter than a list.
|
||||||
|
You can use `use_list=False` while unpacking when performance is important.
|
||||||
|
|
||||||
|
|
||||||
|
## Major breaking changes in the history
|
||||||
|
|
||||||
|
### msgpack 0.5
|
||||||
|
|
||||||
|
The package name on PyPI was changed from `msgpack-python` to `msgpack` in 0.5.
|
||||||
|
|
||||||
|
When upgrading from msgpack-0.4 or earlier, do `pip uninstall msgpack-python` before
|
||||||
|
`pip install -U msgpack`.
|
||||||
|
|
||||||
|
|
||||||
|
### msgpack 1.0
|
||||||
|
|
||||||
|
* Python 2 support
|
||||||
|
|
||||||
|
* The extension module no longer supports Python 2.
|
||||||
|
The pure Python implementation (`msgpack.fallback`) is used for Python 2.
|
||||||
|
|
||||||
|
* msgpack 1.0.6 drops official support of Python 2.7, as pip and
|
||||||
|
GitHub Action "setup-python" no longer supports Python 2.7.
|
||||||
|
|
||||||
|
* Packer
|
||||||
|
|
||||||
|
* Packer uses `use_bin_type=True` by default.
|
||||||
|
Bytes are encoded in the bin type in MessagePack.
|
||||||
|
* The `encoding` option is removed. UTF-8 is always used.
|
||||||
|
|
||||||
|
* Unpacker
|
||||||
|
|
||||||
|
* Unpacker uses `raw=False` by default. It assumes str values are valid UTF-8 strings
|
||||||
|
and decodes them to Python str (Unicode) objects.
|
||||||
|
* `encoding` option is removed. You can use `raw=True` to support old format (e.g. unpack into bytes, not str).
|
||||||
|
* The default value of `max_buffer_size` is changed from 0 to 100 MiB to avoid DoS attacks.
|
||||||
|
You need to pass `max_buffer_size=0` if you have large but safe data.
|
||||||
|
* The default value of `strict_map_key` is changed to True to avoid hash DoS.
|
||||||
|
You need to pass `strict_map_key=False` if you have data that contain map keys
|
||||||
|
whose type is neither bytes nor str.
|
||||||
336
README.rst
336
README.rst
|
|
@ -1,336 +0,0 @@
|
||||||
======================
|
|
||||||
MessagePack for Python
|
|
||||||
======================
|
|
||||||
|
|
||||||
.. image:: https://travis-ci.org/msgpack/msgpack-python.svg?branch=master
|
|
||||||
:target: https://travis-ci.org/msgpack/msgpack-python
|
|
||||||
:alt: Build Status
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/msgpack-python/badge/?version=latest
|
|
||||||
:target: https://msgpack-python.readthedocs.io/en/latest/?badge=latest
|
|
||||||
:alt: Documentation Status
|
|
||||||
|
|
||||||
|
|
||||||
What's this
|
|
||||||
-----------
|
|
||||||
|
|
||||||
`MessagePack <https://msgpack.org/>`_ is an efficient binary serialization format.
|
|
||||||
It lets you exchange data among multiple languages like JSON.
|
|
||||||
But it's faster and smaller.
|
|
||||||
This package provides CPython bindings for reading and writing MessagePack data.
|
|
||||||
|
|
||||||
|
|
||||||
Very important notes for existing users
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
PyPI package name
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
TL;DR: When upgrading from msgpack-0.4 or earlier, don't do `pip install -U msgpack-python`.
|
|
||||||
Do `pip uninstall msgpack-python; pip install msgpack` instead.
|
|
||||||
|
|
||||||
The package name on PyPI was changed to msgpack starting with version 0.5.
|
|
||||||
I uploaded a transitional package (msgpack-python 0.5, which depends on msgpack)
|
|
||||||
for a smooth transition from msgpack-python to msgpack.
|
|
||||||
|
|
||||||
Sadly, this doesn't work for upgrade install. After `pip install -U msgpack-python`,
|
|
||||||
msgpack is removed and `import msgpack` fails.
|
|
||||||
|
|
||||||
|
|
||||||
Deprecating encoding option
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
encoding and unicode_errors options are deprecated.
|
|
||||||
|
|
||||||
In case of packer, use UTF-8 always. Storing other than UTF-8 is not recommended.
|
|
||||||
|
|
||||||
For backward compatibility, you can use ``use_bin_type=False`` and pack ``bytes``
|
|
||||||
object into msgpack raw type.
|
|
||||||
|
|
||||||
In case of unpacker, there is new ``raw`` option. It is ``True`` by default
|
|
||||||
for backward compatibility, but it is changed to ``False`` in near future.
|
|
||||||
You can use ``raw=False`` instead of ``encoding='utf-8'``.
|
|
||||||
|
|
||||||
Planned backward incompatible changes
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
For msgpack 1.0, I am planning these breaking changes:
|
|
||||||
|
|
||||||
* packer and unpacker: Remove ``encoding`` and ``unicode_errors`` option.
|
|
||||||
* packer: Change default of ``use_bin_type`` option from False to True.
|
|
||||||
* unpacker: Change default of ``raw`` option from True to False.
|
|
||||||
* unpacker: Reduce all ``max_xxx_len`` options for typical usage.
|
|
||||||
* unpacker: Remove ``write_bytes`` option from all methods.
|
|
||||||
|
|
||||||
To avoid having these breaking changes break your application, please:
|
|
||||||
|
|
||||||
* Don't use deprecated options.
|
|
||||||
* Pass ``use_bin_type`` and ``raw`` options explicitly.
|
|
||||||
* If your application handles large (>1MB) data, specify ``max_xxx_len`` options too.
|
|
||||||
|
|
||||||
|
|
||||||
Install
|
|
||||||
-------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pip install msgpack
|
|
||||||
|
|
||||||
PyPy
|
|
||||||
^^^^
|
|
||||||
|
|
||||||
msgpack provides a pure Python implementation. PyPy can use this.
|
|
||||||
|
|
||||||
Windows
|
|
||||||
^^^^^^^
|
|
||||||
|
|
||||||
When you can't use a binary distribution, you need to install Visual Studio
|
|
||||||
or Windows SDK on Windows.
|
|
||||||
Without the extension, the pure Python implementation on CPython runs slowly.
|
|
||||||
|
|
||||||
For Python 2.7, `Microsoft Visual C++ Compiler for Python 2.7 <https://www.microsoft.com/en-us/download/details.aspx?id=44266>`_
|
|
||||||
is the recommended solution.
|
|
||||||
|
|
||||||
For Python 3.5, `Microsoft Visual Studio 2015 <https://www.visualstudio.com/en-us/products/vs-2015-product-editions.aspx>`_
|
|
||||||
Community Edition or Express Edition can be used to build extension module.
|
|
||||||
|
|
||||||
|
|
||||||
How to use
|
|
||||||
----------
|
|
||||||
|
|
||||||
One-shot pack & unpack
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Use ``packb`` for packing and ``unpackb`` for unpacking.
|
|
||||||
msgpack provides ``dumps`` and ``loads`` as aliases for compatibility with
|
|
||||||
``json`` and ``pickle``.
|
|
||||||
|
|
||||||
``pack`` and ``dump`` packs to a file-like object.
|
|
||||||
``unpack`` and ``load`` unpack from a file-like object.
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> import msgpack
|
|
||||||
>>> msgpack.packb([1, 2, 3], use_bin_type=True)
|
|
||||||
'\x93\x01\x02\x03'
|
|
||||||
>>> msgpack.unpackb(_, raw=False)
|
|
||||||
[1, 2, 3]
|
|
||||||
|
|
||||||
``unpack`` unpacks a msgpack array to a Python list, but can also unpack it to a tuple:
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> msgpack.unpackb(b'\x93\x01\x02\x03', use_list=False, raw=False)
|
|
||||||
(1, 2, 3)
|
|
||||||
|
|
||||||
You should always specify the ``use_list`` keyword argument for backward compatibility.
|
|
||||||
See performance issues relating to `use_list option`_ below.
|
|
||||||
|
|
||||||
Read the docstring for other options.
|
|
||||||
|
|
||||||
|
|
||||||
Streaming unpacking
|
|
||||||
^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
``Unpacker`` is a "streaming unpacker". It unpacks multiple objects from one
|
|
||||||
stream (or from bytes provided through its ``feed`` method).
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
import msgpack
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
buf = BytesIO()
|
|
||||||
for i in range(100):
|
|
||||||
buf.write(msgpack.packb(range(i), use_bin_type=True))
|
|
||||||
|
|
||||||
buf.seek(0)
|
|
||||||
|
|
||||||
unpacker = msgpack.Unpacker(buf, raw=False)
|
|
||||||
for unpacked in unpacker:
|
|
||||||
print(unpacked)
|
|
||||||
|
|
||||||
|
|
||||||
Packing/unpacking of custom data type
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
It is also possible to pack/unpack custom data types. Here is an example for
|
|
||||||
``datetime.datetime``.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import msgpack
|
|
||||||
|
|
||||||
useful_dict = {
|
|
||||||
"id": 1,
|
|
||||||
"created": datetime.datetime.now(),
|
|
||||||
}
|
|
||||||
|
|
||||||
def decode_datetime(obj):
|
|
||||||
if b'__datetime__' in obj:
|
|
||||||
obj = datetime.datetime.strptime(obj["as_str"], "%Y%m%dT%H:%M:%S.%f")
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def encode_datetime(obj):
|
|
||||||
if isinstance(obj, datetime.datetime):
|
|
||||||
return {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f")}
|
|
||||||
return obj
|
|
||||||
|
|
||||||
|
|
||||||
packed_dict = msgpack.packb(useful_dict, default=encode_datetime, use_bin_type=True)
|
|
||||||
this_dict_again = msgpack.unpackb(packed_dict, object_hook=decode_datetime, raw=False)
|
|
||||||
|
|
||||||
``Unpacker``'s ``object_hook`` callback receives a dict; the
|
|
||||||
``object_pairs_hook`` callback may instead be used to receive a list of
|
|
||||||
key-value pairs.
|
|
||||||
|
|
||||||
|
|
||||||
Extended types
|
|
||||||
^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
It is also possible to pack/unpack custom data types using the **ext** type.
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> import msgpack
|
|
||||||
>>> import array
|
|
||||||
>>> def default(obj):
|
|
||||||
... if isinstance(obj, array.array) and obj.typecode == 'd':
|
|
||||||
... return msgpack.ExtType(42, obj.tostring())
|
|
||||||
... raise TypeError("Unknown type: %r" % (obj,))
|
|
||||||
...
|
|
||||||
>>> def ext_hook(code, data):
|
|
||||||
... if code == 42:
|
|
||||||
... a = array.array('d')
|
|
||||||
... a.fromstring(data)
|
|
||||||
... return a
|
|
||||||
... return ExtType(code, data)
|
|
||||||
...
|
|
||||||
>>> data = array.array('d', [1.2, 3.4])
|
|
||||||
>>> packed = msgpack.packb(data, default=default, use_bin_type=True)
|
|
||||||
>>> unpacked = msgpack.unpackb(packed, ext_hook=ext_hook, raw=False)
|
|
||||||
>>> data == unpacked
|
|
||||||
True
|
|
||||||
|
|
||||||
|
|
||||||
Advanced unpacking control
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
As an alternative to iteration, ``Unpacker`` objects provide ``unpack``,
|
|
||||||
``skip``, ``read_array_header`` and ``read_map_header`` methods. The former two
|
|
||||||
read an entire message from the stream, respectively de-serialising and returning
|
|
||||||
the result, or ignoring it. The latter two methods return the number of elements
|
|
||||||
in the upcoming container, so that each element in an array, or key-value pair
|
|
||||||
in a map, can be unpacked or skipped individually.
|
|
||||||
|
|
||||||
Each of these methods may optionally write the packed data it reads to a
|
|
||||||
callback function:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
def distribute(unpacker, get_worker):
|
|
||||||
nelems = unpacker.read_map_header()
|
|
||||||
for i in range(nelems):
|
|
||||||
# Select a worker for the given key
|
|
||||||
key = unpacker.unpack()
|
|
||||||
worker = get_worker(key)
|
|
||||||
|
|
||||||
# Send the value as a packed message to worker
|
|
||||||
bytestream = BytesIO()
|
|
||||||
unpacker.skip(bytestream.write)
|
|
||||||
worker.send(bytestream.getvalue())
|
|
||||||
|
|
||||||
|
|
||||||
Notes
|
|
||||||
-----
|
|
||||||
|
|
||||||
string and binary type
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Early versions of msgpack didn't distinguish string and binary types (like Python 1).
|
|
||||||
The type for representing both string and binary types was named **raw**.
|
|
||||||
|
|
||||||
For backward compatibility reasons, msgpack-python will still default all
|
|
||||||
strings to byte strings, unless you specify the ``use_bin_type=True`` option in
|
|
||||||
the packer. If you do so, it will use a non-standard type called **bin** to
|
|
||||||
serialize byte arrays, and **raw** comes to mean **str**. If you want to
|
|
||||||
distinguish **bin** and **raw** in the unpacker, specify ``raw=False``.
|
|
||||||
|
|
||||||
Note that Python 2 defaults to byte-arrays over Unicode strings:
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> import msgpack
|
|
||||||
>>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs']))
|
|
||||||
['spam', 'eggs']
|
|
||||||
>>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs'], use_bin_type=True),
|
|
||||||
raw=False)
|
|
||||||
['spam', u'eggs']
|
|
||||||
|
|
||||||
This is the same code in Python 3 (same behaviour, but Python 3 has a
|
|
||||||
different default):
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> import msgpack
|
|
||||||
>>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs']))
|
|
||||||
[b'spam', b'eggs']
|
|
||||||
>>> msgpack.unpackb(msgpack.packb([b'spam', u'eggs'], use_bin_type=True),
|
|
||||||
raw=False)
|
|
||||||
[b'spam', 'eggs']
|
|
||||||
|
|
||||||
|
|
||||||
ext type
|
|
||||||
^^^^^^^^
|
|
||||||
|
|
||||||
To use the **ext** type, pass a ``msgpack.ExtType`` object to the packer.
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> import msgpack
|
|
||||||
>>> packed = msgpack.packb(msgpack.ExtType(42, b'xyzzy'))
|
|
||||||
>>> msgpack.unpackb(packed)
|
|
||||||
ExtType(code=42, data='xyzzy')
|
|
||||||
|
|
||||||
You can use it with ``default`` and ``ext_hook``. See below.
|
|
||||||
|
|
||||||
|
|
||||||
Note about performance
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
GC
|
|
||||||
^^
|
|
||||||
|
|
||||||
CPython's GC starts when the number of allocated objects grows.
|
|
||||||
This means unpacking may trigger unnecessary GC.
|
|
||||||
You can use ``gc.disable()`` when unpacking a large message.
|
|
||||||
|
|
||||||
use_list option
|
|
||||||
^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
List is the default sequence type of Python.
|
|
||||||
But tuple is lighter than list.
|
|
||||||
You can use ``use_list=False`` while unpacking when performance is important.
|
|
||||||
|
|
||||||
Python's dict can't use a list as a key, while MessagePack allows an array as a mapping key.
|
|
||||||
``use_list=False`` allows unpacking such messages.
|
|
||||||
Another way to unpack such objects is to use ``object_pairs_hook``.
|
|
||||||
|
|
||||||
|
|
||||||
Development
|
|
||||||
-----------
|
|
||||||
|
|
||||||
Test
|
|
||||||
^^^^
|
|
||||||
|
|
||||||
MessagePack uses `pytest` for testing.
|
|
||||||
Run the tests with the following command:
|
|
||||||
|
|
||||||
$ make test
|
|
||||||
|
|
||||||
|
|
||||||
..
|
|
||||||
vim: filetype=rst
|
|
||||||
5
SECURITY.md
Normal file
5
SECURITY.md
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
## Security contact information
|
||||||
|
|
||||||
|
To report a security vulnerability, please use the
|
||||||
|
[Tidelift security contact](https://tidelift.com/security).
|
||||||
|
Tidelift will coordinate the fix and disclosure.
|
||||||
45
appveyor.yml
45
appveyor.yml
|
|
@ -1,45 +0,0 @@
|
||||||
environment:
|
|
||||||
matrix:
|
|
||||||
# For Python versions available on Appveyor, see
|
|
||||||
# http://www.appveyor.com/docs/installed-software#python
|
|
||||||
- PYTHON: "C:\\Python36"
|
|
||||||
|
|
||||||
install:
|
|
||||||
# We need wheel installed to build wheels
|
|
||||||
- "%PYTHON%\\python.exe -m pip install -U cython"
|
|
||||||
- "%PYTHON%\\Scripts\\cython --cplus msgpack/_packer.pyx msgpack/_unpacker.pyx"
|
|
||||||
|
|
||||||
build: off
|
|
||||||
|
|
||||||
test_script:
|
|
||||||
# Put your test command here.
|
|
||||||
# Note that you must use the environment variable %PYTHON% to refer to
|
|
||||||
# the interpreter you're using - Appveyor does not do anything special
|
|
||||||
# to put the Python version you want to use on PATH.
|
|
||||||
- set PYTHON="C:\\Python27"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
- set PYTHON="C:\\Python27-x64"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
- set PYTHON="C:\\Python35"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
- set PYTHON="C:\\Python35-x64"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
- set PYTHON="C:\\Python36"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
- set PYTHON="C:\\Python36-x64"
|
|
||||||
- ci\\runtests.bat
|
|
||||||
|
|
||||||
after_test:
|
|
||||||
# This step builds your wheels.
|
|
||||||
# Again, you need to use %PYTHON% to get the correct interpreter
|
|
||||||
|
|
||||||
artifacts:
|
|
||||||
# bdist_wheel puts your built wheel in the dist directory
|
|
||||||
- path: dist\*.whl
|
|
||||||
|
|
||||||
#on_success:
|
|
||||||
# You can use this step to upload your artifacts to a public website.
|
|
||||||
# See Appveyor's documentation for more details. Or you can simply
|
|
||||||
# access your wheels from the Appveyor "artifacts" tab for your build.
|
|
||||||
|
|
||||||
# vim: set shiftwidth=2
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
from msgpack import fallback
|
from msgpack import fallback
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from msgpack import _unpacker, _packer
|
from msgpack import _cmsgpack
|
||||||
|
|
||||||
has_ext = True
|
has_ext = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
has_ext = False
|
has_ext = False
|
||||||
|
|
@ -9,26 +11,28 @@ import timeit
|
||||||
|
|
||||||
def profile(name, func):
|
def profile(name, func):
|
||||||
times = timeit.repeat(func, number=1000, repeat=4)
|
times = timeit.repeat(func, number=1000, repeat=4)
|
||||||
times = ', '.join(["%8f" % t for t in times])
|
times = ", ".join(["%8f" % t for t in times])
|
||||||
print("%-30s %40s" % (name, times))
|
print("%-30s %40s" % (name, times))
|
||||||
|
|
||||||
|
|
||||||
def simple(name, data):
|
def simple(name, data):
|
||||||
if has_ext:
|
if has_ext:
|
||||||
packer = _packer.Packer()
|
packer = _cmsgpack.Packer()
|
||||||
profile("packing %s (ext)" % name, lambda: packer.pack(data))
|
profile("packing %s (ext)" % name, lambda: packer.pack(data))
|
||||||
packer = fallback.Packer()
|
packer = fallback.Packer()
|
||||||
profile('packing %s (fallback)' % name, lambda: packer.pack(data))
|
profile("packing %s (fallback)" % name, lambda: packer.pack(data))
|
||||||
|
|
||||||
data = packer.pack(data)
|
data = packer.pack(data)
|
||||||
if has_ext:
|
if has_ext:
|
||||||
profile('unpacking %s (ext)' % name, lambda: _unpacker.unpackb(data))
|
profile("unpacking %s (ext)" % name, lambda: _cmsgpack.unpackb(data))
|
||||||
profile('unpacking %s (fallback)' % name, lambda: fallback.unpackb(data))
|
profile("unpacking %s (fallback)" % name, lambda: fallback.unpackb(data))
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
simple("integers", [7]*10000)
|
simple("integers", [7] * 10000)
|
||||||
simple("bytes", [b'x'*n for n in range(100)]*10)
|
simple("bytes", [b"x" * n for n in range(100)] * 10)
|
||||||
simple("lists", [[]]*10000)
|
simple("lists", [[]] * 10000)
|
||||||
simple("dicts", [{}]*10000)
|
simple("dicts", [{}] * 10000)
|
||||||
|
|
||||||
|
|
||||||
main()
|
main()
|
||||||
|
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
%PYTHON%\python.exe -m pip install -U pip wheel pytest
|
|
||||||
%PYTHON%\python.exe setup.py build_ext -i
|
|
||||||
%PYTHON%\python.exe setup.py install
|
|
||||||
%PYTHON%\python.exe -c "import sys; print(hex(sys.maxsize))"
|
|
||||||
%PYTHON%\python.exe -c "from msgpack import _packer, _unpacker"
|
|
||||||
%PYTHON%\python.exe setup.py bdist_wheel
|
|
||||||
%PYTHON%\python.exe -m pytest -v test
|
|
||||||
SET EL=%ERRORLEVEL%
|
|
||||||
exit /b %EL%
|
|
||||||
|
|
@ -1,11 +1,22 @@
|
||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
DOCKER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
source "$DOCKER_DIR/shared.env"
|
||||||
|
|
||||||
set -e -x
|
set -e -x
|
||||||
|
|
||||||
ARCH=`uname -p`
|
ARCH=`uname -p`
|
||||||
echo "arch=$ARCH"
|
echo "arch=$ARCH"
|
||||||
|
|
||||||
for V in cp36-cp36m cp35-cp35m cp27-cp27m cp27-cp27mu; do
|
ls /opt/python
|
||||||
|
|
||||||
|
for V in "${PYTHON_VERSIONS[@]}"; do
|
||||||
PYBIN=/opt/python/$V/bin
|
PYBIN=/opt/python/$V/bin
|
||||||
rm -rf build/ # Avoid lib build by narrow Python is used by wide python
|
rm -rf build/ # Avoid lib build by narrow Python is used by wide python
|
||||||
$PYBIN/python setup.py bdist_wheel -p manylinux1_${ARCH}
|
$PYBIN/python -m build -w
|
||||||
|
done
|
||||||
|
|
||||||
|
cd dist
|
||||||
|
for whl in *.whl; do
|
||||||
|
auditwheel repair "$whl"
|
||||||
|
rm "$whl"
|
||||||
done
|
done
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,17 @@
|
||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
DOCKER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||||
|
source "$DOCKER_DIR/shared.env"
|
||||||
|
|
||||||
set -e -x
|
set -e -x
|
||||||
|
|
||||||
for V in cp36-cp36m cp35-cp35m cp27-cp27m cp27-cp27mu; do
|
for V in "${PYTHON_VERSIONS[@]}"; do
|
||||||
PYBIN=/opt/python/$V/bin
|
PYBIN=/opt/python/$V/bin
|
||||||
$PYBIN/python setup.py install
|
$PYBIN/python setup.py install
|
||||||
rm -rf build/ # Avoid lib build by narrow Python is used by wide python
|
rm -rf build/ # Avoid lib build by narrow Python is used by wide python
|
||||||
$PYBIN/pip install pytest
|
$PYBIN/pip install pytest
|
||||||
pushd test # prevent importing msgpack package in current directory.
|
pushd test # prevent importing msgpack package in current directory.
|
||||||
$PYBIN/python -c 'import sys; print(hex(sys.maxsize))'
|
$PYBIN/python -c 'import sys; print(hex(sys.maxsize))'
|
||||||
$PYBIN/python -c 'from msgpack import _packer, _unpacker'
|
$PYBIN/python -c 'from msgpack import _cmsgpack' # Ensure extension is available
|
||||||
$PYBIN/pytest -v .
|
$PYBIN/pytest -v .
|
||||||
popd
|
popd
|
||||||
done
|
done
|
||||||
|
|
|
||||||
7
docker/shared.env
Normal file
7
docker/shared.env
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
PYTHON_VERSIONS=(
|
||||||
|
cp310-cp310
|
||||||
|
cp39-cp39
|
||||||
|
cp38-cp38
|
||||||
|
cp37-cp37m
|
||||||
|
cp36-cp36m
|
||||||
|
)
|
||||||
|
|
@ -153,7 +153,7 @@ doctest:
|
||||||
"results in $(BUILDDIR)/doctest/output.txt."
|
"results in $(BUILDDIR)/doctest/output.txt."
|
||||||
|
|
||||||
serve: html
|
serve: html
|
||||||
cd _build/html && python3 -m http.server
|
python3 -m http.server -d _build/html
|
||||||
|
|
||||||
zip: html
|
zip: html
|
||||||
cd _build/html && zip -r ../../../msgpack-doc.zip .
|
cd _build/html && zip -r ../../../msgpack-doc.zip .
|
||||||
|
|
|
||||||
1
docs/_static/README.txt
vendored
Normal file
1
docs/_static/README.txt
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
Sphinx will copy the contents of docs/_static/ directory to the build location.
|
||||||
32
docs/advanced.rst
Normal file
32
docs/advanced.rst
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
Advanced usage
|
||||||
|
===============
|
||||||
|
|
||||||
|
Packer
|
||||||
|
------
|
||||||
|
|
||||||
|
autoreset
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
When you use the ``autoreset=False`` option of :class:`~msgpack.Packer`,
|
||||||
|
the ``pack()`` method doesn't return the packed ``bytes``.
|
||||||
|
|
||||||
|
You can use :meth:`~msgpack.Packer.bytes` or :meth:`~msgpack.Packer.getbuffer` to
|
||||||
|
get packed data.
|
||||||
|
|
||||||
|
``bytes()`` returns ``bytes`` object. ``getbuffer()`` returns some bytes-like
|
||||||
|
object. Its concrete type is an implementation detail and may change in future
|
||||||
|
versions.
|
||||||
|
|
||||||
|
You can avoid creating a temporary bytes object by using ``Packer.getbuffer()``.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
packer = Packer(use_bin_type=True, autoreset=False)
|
||||||
|
|
||||||
|
packer.pack([1, 2])
|
||||||
|
packer.pack([3, 4])
|
||||||
|
|
||||||
|
with open('data.bin', 'wb') as f:
|
||||||
|
f.write(packer.getbuffer())
|
||||||
|
|
||||||
|
packer.reset() # reset internal buffer
|
||||||
12
docs/api.rst
12
docs/api.rst
|
|
@ -5,19 +5,19 @@ API reference
|
||||||
|
|
||||||
.. autofunction:: pack
|
.. autofunction:: pack
|
||||||
|
|
||||||
:func:`dump` is alias for :func:`pack`
|
``dump()`` is an alias for :func:`pack`
|
||||||
|
|
||||||
.. autofunction:: packb
|
.. autofunction:: packb
|
||||||
|
|
||||||
:func:`dumps` is alias for :func:`packb`
|
``dumps()`` is an alias for :func:`packb`
|
||||||
|
|
||||||
.. autofunction:: unpack
|
.. autofunction:: unpack
|
||||||
|
|
||||||
:func:`load` is alias for :func:`unpack`
|
``load()`` is an alias for :func:`unpack`
|
||||||
|
|
||||||
.. autofunction:: unpackb
|
.. autofunction:: unpackb
|
||||||
|
|
||||||
:func:`loads` is alias for :func:`unpackb`
|
``loads()`` is an alias for :func:`unpackb`
|
||||||
|
|
||||||
.. autoclass:: Packer
|
.. autoclass:: Packer
|
||||||
:members:
|
:members:
|
||||||
|
|
@ -27,6 +27,10 @@ API reference
|
||||||
|
|
||||||
.. autoclass:: ExtType
|
.. autoclass:: ExtType
|
||||||
|
|
||||||
|
.. autoclass:: Timestamp
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
exceptions
|
exceptions
|
||||||
----------
|
----------
|
||||||
|
|
||||||
|
|
|
||||||
170
docs/conf.py
170
docs/conf.py
|
|
@ -1,5 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# msgpack documentation build configuration file, created by
|
# msgpack documentation build configuration file, created by
|
||||||
# sphinx-quickstart on Sun Feb 24 14:20:50 2013.
|
# sphinx-quickstart on Sun Feb 24 14:20:50 2013.
|
||||||
#
|
#
|
||||||
|
|
@ -11,37 +9,37 @@
|
||||||
# All configuration values have a default; values that are commented out
|
# All configuration values have a default; values that are commented out
|
||||||
# serve to show the default.
|
# serve to show the default.
|
||||||
|
|
||||||
import sys, os
|
|
||||||
|
|
||||||
# If extensions (or modules to document with autodoc) are in another directory,
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
# add these directories to sys.path here. If the directory is relative to the
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
#sys.path.insert(0, os.path.abspath('.'))
|
# import os
|
||||||
|
# import sys
|
||||||
|
# sys.path.insert(0, os.path.abspath('..'))
|
||||||
|
|
||||||
# -- General configuration -----------------------------------------------------
|
# -- General configuration -----------------------------------------------------
|
||||||
|
|
||||||
# If your documentation needs a minimal Sphinx version, state it here.
|
# If your documentation needs a minimal Sphinx version, state it here.
|
||||||
#needs_sphinx = '1.0'
|
# needs_sphinx = '1.0'
|
||||||
|
|
||||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
|
extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
# The suffix of source filenames.
|
# The suffix of source filenames.
|
||||||
source_suffix = '.rst'
|
source_suffix = ".rst"
|
||||||
|
|
||||||
# The encoding of source files.
|
# The encoding of source files.
|
||||||
#source_encoding = 'utf-8-sig'
|
# source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
# The master toctree document.
|
# The master toctree document.
|
||||||
master_doc = 'index'
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u'msgpack'
|
project = "msgpack"
|
||||||
copyright = u'2013, INADA Naoki'
|
copyright = "Inada Naoki"
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
|
@ -49,176 +47,170 @@ copyright = u'2013, INADA Naoki'
|
||||||
#
|
#
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
version = release = '0.5'
|
version = release = "1.0"
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
#language = None
|
# language = None
|
||||||
|
|
||||||
# There are two options for replacing |today|: either, you set today to some
|
# There are two options for replacing |today|: either, you set today to some
|
||||||
# non-false value, then it is used:
|
# non-false value, then it is used:
|
||||||
#today = ''
|
# today = ''
|
||||||
# Else, today_fmt is used as the format for a strftime call.
|
# Else, today_fmt is used as the format for a strftime call.
|
||||||
#today_fmt = '%B %d, %Y'
|
# today_fmt = '%B %d, %Y'
|
||||||
today_fmt = "%Y-%m-%d"
|
today_fmt = "%Y-%m-%d"
|
||||||
|
|
||||||
# List of patterns, relative to source directory, that match files and
|
# List of patterns, relative to source directory, that match files and
|
||||||
# directories to ignore when looking for source files.
|
# directories to ignore when looking for source files.
|
||||||
exclude_patterns = ['_build']
|
exclude_patterns = ["_build"]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||||
#default_role = None
|
# default_role = None
|
||||||
|
|
||||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||||
#add_function_parentheses = True
|
# add_function_parentheses = True
|
||||||
|
|
||||||
# If true, the current module name will be prepended to all description
|
# If true, the current module name will be prepended to all description
|
||||||
# unit titles (such as .. function::).
|
# unit titles (such as .. function::).
|
||||||
#add_module_names = True
|
# add_module_names = True
|
||||||
|
|
||||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
# output. They are ignored by default.
|
# output. They are ignored by default.
|
||||||
#show_authors = False
|
# show_authors = False
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'sphinx'
|
pygments_style = "sphinx"
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
#modindex_common_prefix = []
|
# modindex_common_prefix = []
|
||||||
|
|
||||||
|
|
||||||
# -- Options for HTML output ---------------------------------------------------
|
# -- Options for HTML output ---------------------------------------------------
|
||||||
|
|
||||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
# a list of builtin themes.
|
# a list of builtin themes.
|
||||||
html_theme = 'sphinxdoc'
|
html_theme = "sphinx_rtd_theme"
|
||||||
|
|
||||||
# Theme options are theme-specific and customize the look and feel of a theme
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
# documentation.
|
# documentation.
|
||||||
#html_theme_options = {}
|
# html_theme_options = {}
|
||||||
|
|
||||||
# Add any paths that contain custom themes here, relative to this directory.
|
# Add any paths that contain custom themes here, relative to this directory.
|
||||||
#html_theme_path = []
|
# html_theme_path = []
|
||||||
|
|
||||||
# The name for this set of Sphinx documents. If None, it defaults to
|
# The name for this set of Sphinx documents. If None, it defaults to
|
||||||
# "<project> v<release> documentation".
|
# "<project> v<release> documentation".
|
||||||
#html_title = None
|
# html_title = None
|
||||||
|
|
||||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||||
#html_short_title = None
|
# html_short_title = None
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top
|
# The name of an image file (relative to this directory) to place at the top
|
||||||
# of the sidebar.
|
# of the sidebar.
|
||||||
#html_logo = None
|
# html_logo = None
|
||||||
|
|
||||||
# The name of an image file (within the static path) to use as favicon of the
|
# The name of an image file (within the static path) to use as favicon of the
|
||||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||||
# pixels large.
|
# pixels large.
|
||||||
#html_favicon = None
|
# html_favicon = None
|
||||||
|
|
||||||
# Add any paths that contain custom static files (such as style sheets) here,
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
# relative to this directory. They are copied after the builtin static files,
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
html_static_path = ['_static']
|
html_static_path = ["_static"]
|
||||||
|
|
||||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
# using the given strftime format.
|
# using the given strftime format.
|
||||||
#html_last_updated_fmt = '%b %d, %Y'
|
# html_last_updated_fmt = '%b %d, %Y'
|
||||||
|
|
||||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||||
# typographically correct entities.
|
# typographically correct entities.
|
||||||
#html_use_smartypants = True
|
# html_use_smartypants = True
|
||||||
|
|
||||||
# Custom sidebar templates, maps document names to template names.
|
# Custom sidebar templates, maps document names to template names.
|
||||||
#html_sidebars = {}
|
# html_sidebars = {}
|
||||||
|
|
||||||
# Additional templates that should be rendered to pages, maps page names to
|
# Additional templates that should be rendered to pages, maps page names to
|
||||||
# template names.
|
# template names.
|
||||||
#html_additional_pages = {}
|
# html_additional_pages = {}
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#html_domain_indices = True
|
# html_domain_indices = True
|
||||||
|
|
||||||
# If false, no index is generated.
|
# If false, no index is generated.
|
||||||
#html_use_index = True
|
# html_use_index = True
|
||||||
|
|
||||||
# If true, the index is split into individual pages for each letter.
|
# If true, the index is split into individual pages for each letter.
|
||||||
#html_split_index = False
|
# html_split_index = False
|
||||||
|
|
||||||
# If true, links to the reST sources are added to the pages.
|
# If true, links to the reST sources are added to the pages.
|
||||||
#html_show_sourcelink = True
|
# html_show_sourcelink = True
|
||||||
|
|
||||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||||
#html_show_sphinx = True
|
# html_show_sphinx = True
|
||||||
|
|
||||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||||
#html_show_copyright = True
|
# html_show_copyright = True
|
||||||
|
|
||||||
# If true, an OpenSearch description file will be output, and all pages will
|
# If true, an OpenSearch description file will be output, and all pages will
|
||||||
# contain a <link> tag referring to it. The value of this option must be the
|
# contain a <link> tag referring to it. The value of this option must be the
|
||||||
# base URL from which the finished HTML is served.
|
# base URL from which the finished HTML is served.
|
||||||
#html_use_opensearch = ''
|
# html_use_opensearch = ''
|
||||||
|
|
||||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||||
#html_file_suffix = None
|
# html_file_suffix = None
|
||||||
|
|
||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = 'msgpackdoc'
|
htmlhelp_basename = "msgpackdoc"
|
||||||
|
|
||||||
|
|
||||||
# -- Options for LaTeX output --------------------------------------------------
|
# -- Options for LaTeX output --------------------------------------------------
|
||||||
|
|
||||||
latex_elements = {
|
latex_elements = {
|
||||||
# The paper size ('letterpaper' or 'a4paper').
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
#'papersize': 'letterpaper',
|
#'papersize': 'letterpaper',
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
#'pointsize': '10pt',
|
||||||
#'pointsize': '10pt',
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#'preamble': '',
|
||||||
# Additional stuff for the LaTeX preamble.
|
|
||||||
#'preamble': '',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
('index', 'msgpack.tex', u'msgpack Documentation',
|
("index", "msgpack.tex", "msgpack Documentation", "Author", "manual"),
|
||||||
u'Author', 'manual'),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
# the title page.
|
# the title page.
|
||||||
#latex_logo = None
|
# latex_logo = None
|
||||||
|
|
||||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||||
# not chapters.
|
# not chapters.
|
||||||
#latex_use_parts = False
|
# latex_use_parts = False
|
||||||
|
|
||||||
# If true, show page references after internal links.
|
# If true, show page references after internal links.
|
||||||
#latex_show_pagerefs = False
|
# latex_show_pagerefs = False
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
#latex_show_urls = False
|
# latex_show_urls = False
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
#latex_appendices = []
|
# latex_appendices = []
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#latex_domain_indices = True
|
# latex_domain_indices = True
|
||||||
|
|
||||||
|
|
||||||
# -- Options for manual page output --------------------------------------------
|
# -- Options for manual page output --------------------------------------------
|
||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [
|
man_pages = [("index", "msgpack", "msgpack Documentation", ["Author"], 1)]
|
||||||
('index', 'msgpack', u'msgpack Documentation',
|
|
||||||
[u'Author'], 1)
|
|
||||||
]
|
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
#man_show_urls = False
|
# man_show_urls = False
|
||||||
|
|
||||||
|
|
||||||
# -- Options for Texinfo output ------------------------------------------------
|
# -- Options for Texinfo output ------------------------------------------------
|
||||||
|
|
@ -227,59 +219,65 @@ man_pages = [
|
||||||
# (source start file, target name, title, author,
|
# (source start file, target name, title, author,
|
||||||
# dir menu entry, description, category)
|
# dir menu entry, description, category)
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
('index', 'msgpack', u'msgpack Documentation',
|
(
|
||||||
u'Author', 'msgpack', 'One line description of project.',
|
"index",
|
||||||
'Miscellaneous'),
|
"msgpack",
|
||||||
|
"msgpack Documentation",
|
||||||
|
"Author",
|
||||||
|
"msgpack",
|
||||||
|
"One line description of project.",
|
||||||
|
"Miscellaneous",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# Documents to append as an appendix to all manuals.
|
# Documents to append as an appendix to all manuals.
|
||||||
#texinfo_appendices = []
|
# texinfo_appendices = []
|
||||||
|
|
||||||
# If false, no module index is generated.
|
# If false, no module index is generated.
|
||||||
#texinfo_domain_indices = True
|
# texinfo_domain_indices = True
|
||||||
|
|
||||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||||
#texinfo_show_urls = 'footnote'
|
# texinfo_show_urls = 'footnote'
|
||||||
|
|
||||||
|
|
||||||
# -- Options for Epub output ---------------------------------------------------
|
# -- Options for Epub output ---------------------------------------------------
|
||||||
|
|
||||||
# Bibliographic Dublin Core info.
|
# Bibliographic Dublin Core info.
|
||||||
epub_title = u'msgpack'
|
epub_title = "msgpack"
|
||||||
epub_author = u'Author'
|
epub_author = "Author"
|
||||||
epub_publisher = u'Author'
|
epub_publisher = "Author"
|
||||||
epub_copyright = u'2013, Author'
|
epub_copyright = "2013, Author"
|
||||||
|
|
||||||
# The language of the text. It defaults to the language option
|
# The language of the text. It defaults to the language option
|
||||||
# or en if the language is not set.
|
# or en if the language is not set.
|
||||||
#epub_language = ''
|
# epub_language = ''
|
||||||
|
|
||||||
# The scheme of the identifier. Typical schemes are ISBN or URL.
|
# The scheme of the identifier. Typical schemes are ISBN or URL.
|
||||||
#epub_scheme = ''
|
# epub_scheme = ''
|
||||||
|
|
||||||
# The unique identifier of the text. This can be a ISBN number
|
# The unique identifier of the text. This can be a ISBN number
|
||||||
# or the project homepage.
|
# or the project homepage.
|
||||||
#epub_identifier = ''
|
# epub_identifier = ''
|
||||||
|
|
||||||
# A unique identification for the text.
|
# A unique identification for the text.
|
||||||
#epub_uid = ''
|
# epub_uid = ''
|
||||||
|
|
||||||
# A tuple containing the cover image and cover page html template filenames.
|
# A tuple containing the cover image and cover page html template filenames.
|
||||||
#epub_cover = ()
|
# epub_cover = ()
|
||||||
|
|
||||||
# HTML files that should be inserted before the pages created by sphinx.
|
# HTML files that should be inserted before the pages created by sphinx.
|
||||||
# The format is a list of tuples containing the path and title.
|
# The format is a list of tuples containing the path and title.
|
||||||
#epub_pre_files = []
|
# epub_pre_files = []
|
||||||
|
|
||||||
# HTML files shat should be inserted after the pages created by sphinx.
|
# HTML files shat should be inserted after the pages created by sphinx.
|
||||||
# The format is a list of tuples containing the path and title.
|
# The format is a list of tuples containing the path and title.
|
||||||
#epub_post_files = []
|
# epub_post_files = []
|
||||||
|
|
||||||
# A list of files that should not be packed into the epub file.
|
# A list of files that should not be packed into the epub file.
|
||||||
#epub_exclude_files = []
|
# epub_exclude_files = []
|
||||||
|
|
||||||
# The depth of the table of contents in toc.ncx.
|
# The depth of the table of contents in toc.ncx.
|
||||||
#epub_tocdepth = 3
|
# epub_tocdepth = 3
|
||||||
|
|
||||||
# Allow duplicate toc entries.
|
# Allow duplicate toc entries.
|
||||||
#epub_tocdup = True
|
# epub_tocdup = True
|
||||||
|
|
|
||||||
|
|
@ -8,3 +8,4 @@ language data exchange.
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
api
|
api
|
||||||
|
advanced
|
||||||
|
|
|
||||||
2
docs/requirements.txt
Normal file
2
docs/requirements.txt
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
sphinx~=7.3.7
|
||||||
|
sphinx-rtd-theme~=2.0.0
|
||||||
|
|
@ -1,31 +1,20 @@
|
||||||
# coding: utf-8
|
# ruff: noqa: F401
|
||||||
from msgpack._version import version
|
|
||||||
from msgpack.exceptions import *
|
|
||||||
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
|
|
||||||
class ExtType(namedtuple('ExtType', 'code data')):
|
|
||||||
"""ExtType represents ext type in msgpack."""
|
|
||||||
def __new__(cls, code, data):
|
|
||||||
if not isinstance(code, int):
|
|
||||||
raise TypeError("code must be int")
|
|
||||||
if not isinstance(data, bytes):
|
|
||||||
raise TypeError("data must be bytes")
|
|
||||||
if not 0 <= code <= 127:
|
|
||||||
raise ValueError("code must be 0~127")
|
|
||||||
return super(ExtType, cls).__new__(cls, code, data)
|
|
||||||
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
if os.environ.get('MSGPACK_PUREPYTHON'):
|
|
||||||
from msgpack.fallback import Packer, unpackb, Unpacker
|
from .exceptions import * # noqa: F403
|
||||||
|
from .ext import ExtType, Timestamp
|
||||||
|
|
||||||
|
version = (1, 1, 2)
|
||||||
|
__version__ = "1.1.2"
|
||||||
|
|
||||||
|
|
||||||
|
if os.environ.get("MSGPACK_PUREPYTHON"):
|
||||||
|
from .fallback import Packer, Unpacker, unpackb
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
from msgpack._packer import Packer
|
from ._cmsgpack import Packer, Unpacker, unpackb
|
||||||
from msgpack._unpacker import unpackb, Unpacker
|
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from msgpack.fallback import Packer, unpackb, Unpacker
|
from .fallback import Packer, Unpacker, unpackb
|
||||||
|
|
||||||
|
|
||||||
def pack(o, stream, **kwargs):
|
def pack(o, stream, **kwargs):
|
||||||
|
|
|
||||||
12
msgpack/_cmsgpack.pyx
Normal file
12
msgpack/_cmsgpack.pyx
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
#cython: embedsignature=True, c_string_encoding=ascii, language_level=3
|
||||||
|
#cython: freethreading_compatible = True
|
||||||
|
import cython
|
||||||
|
from cpython.datetime cimport import_datetime, datetime_new
|
||||||
|
import_datetime()
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
cdef object utc = datetime.timezone.utc
|
||||||
|
cdef object epoch = datetime_new(1970, 1, 1, 0, 0, 0, 0, tz=utc)
|
||||||
|
|
||||||
|
include "_packer.pyx"
|
||||||
|
include "_unpacker.pyx"
|
||||||
|
|
@ -1,21 +1,19 @@
|
||||||
# coding: utf-8
|
|
||||||
#cython: embedsignature=True, c_string_encoding=ascii
|
|
||||||
|
|
||||||
from cpython cimport *
|
from cpython cimport *
|
||||||
from cpython.version cimport PY_MAJOR_VERSION
|
from cpython.bytearray cimport PyByteArray_Check, PyByteArray_CheckExact
|
||||||
from cpython.exc cimport PyErr_WarnEx
|
from cpython.datetime cimport (
|
||||||
|
PyDateTime_CheckExact, PyDelta_CheckExact,
|
||||||
|
datetime_tzinfo, timedelta_days, timedelta_seconds, timedelta_microseconds,
|
||||||
|
)
|
||||||
|
|
||||||
from msgpack.exceptions import PackValueError, PackOverflowError
|
cdef ExtType
|
||||||
from msgpack import ExtType
|
cdef Timestamp
|
||||||
|
|
||||||
|
from .ext import ExtType, Timestamp
|
||||||
|
|
||||||
|
|
||||||
cdef extern from "Python.h":
|
cdef extern from "Python.h":
|
||||||
|
|
||||||
int PyMemoryView_Check(object obj)
|
int PyMemoryView_Check(object obj)
|
||||||
int PyByteArray_Check(object obj)
|
|
||||||
int PyByteArray_CheckExact(object obj)
|
|
||||||
char* PyUnicode_AsUTF8AndSize(object obj, Py_ssize_t *l) except NULL
|
|
||||||
|
|
||||||
|
|
||||||
cdef extern from "pack.h":
|
cdef extern from "pack.h":
|
||||||
struct msgpack_packer:
|
struct msgpack_packer:
|
||||||
|
|
@ -24,22 +22,21 @@ cdef extern from "pack.h":
|
||||||
size_t buf_size
|
size_t buf_size
|
||||||
bint use_bin_type
|
bint use_bin_type
|
||||||
|
|
||||||
int msgpack_pack_int(msgpack_packer* pk, int d)
|
int msgpack_pack_nil(msgpack_packer* pk) except -1
|
||||||
int msgpack_pack_nil(msgpack_packer* pk)
|
int msgpack_pack_true(msgpack_packer* pk) except -1
|
||||||
int msgpack_pack_true(msgpack_packer* pk)
|
int msgpack_pack_false(msgpack_packer* pk) except -1
|
||||||
int msgpack_pack_false(msgpack_packer* pk)
|
int msgpack_pack_long_long(msgpack_packer* pk, long long d) except -1
|
||||||
int msgpack_pack_long(msgpack_packer* pk, long d)
|
int msgpack_pack_unsigned_long_long(msgpack_packer* pk, unsigned long long d) except -1
|
||||||
int msgpack_pack_long_long(msgpack_packer* pk, long long d)
|
int msgpack_pack_float(msgpack_packer* pk, float d) except -1
|
||||||
int msgpack_pack_unsigned_long_long(msgpack_packer* pk, unsigned long long d)
|
int msgpack_pack_double(msgpack_packer* pk, double d) except -1
|
||||||
int msgpack_pack_float(msgpack_packer* pk, float d)
|
int msgpack_pack_array(msgpack_packer* pk, size_t l) except -1
|
||||||
int msgpack_pack_double(msgpack_packer* pk, double d)
|
int msgpack_pack_map(msgpack_packer* pk, size_t l) except -1
|
||||||
int msgpack_pack_array(msgpack_packer* pk, size_t l)
|
int msgpack_pack_raw(msgpack_packer* pk, size_t l) except -1
|
||||||
int msgpack_pack_map(msgpack_packer* pk, size_t l)
|
int msgpack_pack_bin(msgpack_packer* pk, size_t l) except -1
|
||||||
int msgpack_pack_raw(msgpack_packer* pk, size_t l)
|
int msgpack_pack_raw_body(msgpack_packer* pk, char* body, size_t l) except -1
|
||||||
int msgpack_pack_bin(msgpack_packer* pk, size_t l)
|
int msgpack_pack_ext(msgpack_packer* pk, char typecode, size_t l) except -1
|
||||||
int msgpack_pack_raw_body(msgpack_packer* pk, char* body, size_t l)
|
int msgpack_pack_timestamp(msgpack_packer* x, long long seconds, unsigned long nanoseconds) except -1
|
||||||
int msgpack_pack_ext(msgpack_packer* pk, char typecode, size_t l)
|
|
||||||
int msgpack_pack_unicode(msgpack_packer* pk, object o, long long limit)
|
|
||||||
|
|
||||||
cdef int DEFAULT_RECURSE_LIMIT=511
|
cdef int DEFAULT_RECURSE_LIMIT=511
|
||||||
cdef long long ITEM_LIMIT = (2**32)-1
|
cdef long long ITEM_LIMIT = (2**32)-1
|
||||||
|
|
@ -53,11 +50,11 @@ cdef inline int PyBytesLike_CheckExact(object o):
|
||||||
return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o)
|
return PyBytes_CheckExact(o) or PyByteArray_CheckExact(o)
|
||||||
|
|
||||||
|
|
||||||
cdef class Packer(object):
|
cdef class Packer:
|
||||||
"""
|
"""
|
||||||
MessagePack Packer
|
MessagePack Packer
|
||||||
|
|
||||||
usage::
|
Usage::
|
||||||
|
|
||||||
packer = Packer()
|
packer = Packer()
|
||||||
astream.write(packer.pack(a))
|
astream.write(packer.pack(a))
|
||||||
|
|
@ -65,7 +62,8 @@ cdef class Packer(object):
|
||||||
|
|
||||||
Packer's constructor has some keyword arguments:
|
Packer's constructor has some keyword arguments:
|
||||||
|
|
||||||
:param callable default:
|
:param default:
|
||||||
|
When specified, it should be callable.
|
||||||
Convert user type to builtin type that Packer supports.
|
Convert user type to builtin type that Packer supports.
|
||||||
See also simplejson's document.
|
See also simplejson's document.
|
||||||
|
|
||||||
|
|
@ -78,9 +76,7 @@ cdef class Packer(object):
|
||||||
|
|
||||||
:param bool use_bin_type:
|
:param bool use_bin_type:
|
||||||
Use bin type introduced in msgpack spec 2.0 for bytes.
|
Use bin type introduced in msgpack spec 2.0 for bytes.
|
||||||
It also enables str8 type for unicode.
|
It also enables str8 type for unicode. (default: True)
|
||||||
Current default value is false, but it will be changed to true
|
|
||||||
in future version. You should specify it explicitly.
|
|
||||||
|
|
||||||
:param bool strict_types:
|
:param bool strict_types:
|
||||||
If set to true, types will be checked to be exact. Derived classes
|
If set to true, types will be checked to be exact. Derived classes
|
||||||
|
|
@ -90,199 +86,192 @@ cdef class Packer(object):
|
||||||
This is useful when trying to implement accurate serialization
|
This is useful when trying to implement accurate serialization
|
||||||
for python types.
|
for python types.
|
||||||
|
|
||||||
:param str unicode_errors:
|
:param bool datetime:
|
||||||
Error handler for encoding unicode. (default: 'strict')
|
If set to true, datetime with tzinfo is packed into Timestamp type.
|
||||||
|
Note that the tzinfo is stripped in the timestamp.
|
||||||
|
You can get UTC datetime with `timestamp=3` option of the Unpacker.
|
||||||
|
|
||||||
:param str encoding:
|
:param str unicode_errors:
|
||||||
(deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8')
|
The error handler for encoding unicode. (default: 'strict')
|
||||||
|
DO NOT USE THIS!! This option is kept for very specific usage.
|
||||||
|
|
||||||
|
:param int buf_size:
|
||||||
|
The size of the internal buffer. (default: 256*1024)
|
||||||
|
Useful if serialisation size can be correctly estimated,
|
||||||
|
avoid unnecessary reallocations.
|
||||||
"""
|
"""
|
||||||
cdef msgpack_packer pk
|
cdef msgpack_packer pk
|
||||||
cdef object _default
|
cdef object _default
|
||||||
cdef object _bencoding
|
|
||||||
cdef object _berrors
|
cdef object _berrors
|
||||||
cdef const char *encoding
|
|
||||||
cdef const char *unicode_errors
|
cdef const char *unicode_errors
|
||||||
|
cdef size_t exports # number of exported buffers
|
||||||
cdef bint strict_types
|
cdef bint strict_types
|
||||||
cdef bool use_float
|
cdef bint use_float
|
||||||
cdef bint autoreset
|
cdef bint autoreset
|
||||||
|
cdef bint datetime
|
||||||
|
|
||||||
def __cinit__(self):
|
def __cinit__(self, buf_size=256*1024, **_kwargs):
|
||||||
cdef int buf_size = 1024*1024
|
|
||||||
self.pk.buf = <char*> PyMem_Malloc(buf_size)
|
self.pk.buf = <char*> PyMem_Malloc(buf_size)
|
||||||
if self.pk.buf == NULL:
|
if self.pk.buf == NULL:
|
||||||
raise MemoryError("Unable to allocate internal buffer.")
|
raise MemoryError("Unable to allocate internal buffer.")
|
||||||
self.pk.buf_size = buf_size
|
self.pk.buf_size = buf_size
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
|
self.exports = 0
|
||||||
|
|
||||||
def __init__(self, default=None, encoding=None, unicode_errors=None,
|
def __dealloc__(self):
|
||||||
bint use_single_float=False, bint autoreset=True, bint use_bin_type=False,
|
PyMem_Free(self.pk.buf)
|
||||||
bint strict_types=False):
|
self.pk.buf = NULL
|
||||||
if encoding is not None:
|
assert self.exports == 0
|
||||||
PyErr_WarnEx(PendingDeprecationWarning, "encoding is deprecated.", 1)
|
|
||||||
|
cdef _check_exports(self):
|
||||||
|
if self.exports > 0:
|
||||||
|
raise BufferError("Existing exports of data: Packer cannot be changed")
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
|
def __init__(self, *, default=None,
|
||||||
|
bint use_single_float=False, bint autoreset=True, bint use_bin_type=True,
|
||||||
|
bint strict_types=False, bint datetime=False, unicode_errors=None,
|
||||||
|
buf_size=256*1024):
|
||||||
self.use_float = use_single_float
|
self.use_float = use_single_float
|
||||||
self.strict_types = strict_types
|
self.strict_types = strict_types
|
||||||
self.autoreset = autoreset
|
self.autoreset = autoreset
|
||||||
|
self.datetime = datetime
|
||||||
self.pk.use_bin_type = use_bin_type
|
self.pk.use_bin_type = use_bin_type
|
||||||
if default is not None:
|
if default is not None:
|
||||||
if not PyCallable_Check(default):
|
if not PyCallable_Check(default):
|
||||||
raise TypeError("default must be a callable.")
|
raise TypeError("default must be a callable.")
|
||||||
self._default = default
|
self._default = default
|
||||||
|
|
||||||
self._bencoding = encoding
|
|
||||||
if encoding is None:
|
|
||||||
if PY_MAJOR_VERSION < 3:
|
|
||||||
self.encoding = 'utf-8'
|
|
||||||
else:
|
|
||||||
self.encoding = NULL
|
|
||||||
else:
|
|
||||||
self.encoding = self._bencoding
|
|
||||||
|
|
||||||
self._berrors = unicode_errors
|
self._berrors = unicode_errors
|
||||||
if unicode_errors is None:
|
if unicode_errors is None:
|
||||||
self.unicode_errors = NULL
|
self.unicode_errors = NULL
|
||||||
else:
|
else:
|
||||||
self.unicode_errors = self._berrors
|
self.unicode_errors = self._berrors
|
||||||
|
|
||||||
def __dealloc__(self):
|
# returns -2 when default should(o) be called
|
||||||
PyMem_Free(self.pk.buf)
|
cdef int _pack_inner(self, object o, bint will_default, int nest_limit) except -1:
|
||||||
self.pk.buf = NULL
|
|
||||||
|
|
||||||
cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1:
|
|
||||||
cdef long long llval
|
cdef long long llval
|
||||||
cdef unsigned long long ullval
|
cdef unsigned long long ullval
|
||||||
cdef long longval
|
cdef unsigned long ulval
|
||||||
cdef float fval
|
cdef const char* rawval
|
||||||
cdef double dval
|
|
||||||
cdef char* rawval
|
|
||||||
cdef int ret
|
|
||||||
cdef dict d
|
|
||||||
cdef Py_ssize_t L
|
cdef Py_ssize_t L
|
||||||
cdef int default_used = 0
|
|
||||||
cdef bint strict_types = self.strict_types
|
|
||||||
cdef Py_buffer view
|
cdef Py_buffer view
|
||||||
|
cdef bint strict = self.strict_types
|
||||||
|
|
||||||
if nest_limit < 0:
|
if o is None:
|
||||||
raise PackValueError("recursion limit exceeded.")
|
msgpack_pack_nil(&self.pk)
|
||||||
|
elif o is True:
|
||||||
while True:
|
msgpack_pack_true(&self.pk)
|
||||||
if o is None:
|
elif o is False:
|
||||||
ret = msgpack_pack_nil(&self.pk)
|
msgpack_pack_false(&self.pk)
|
||||||
elif PyBool_Check(o) if strict_types else isinstance(o, bool):
|
elif PyLong_CheckExact(o) if strict else PyLong_Check(o):
|
||||||
if o:
|
try:
|
||||||
ret = msgpack_pack_true(&self.pk)
|
if o > 0:
|
||||||
|
ullval = o
|
||||||
|
msgpack_pack_unsigned_long_long(&self.pk, ullval)
|
||||||
else:
|
else:
|
||||||
ret = msgpack_pack_false(&self.pk)
|
llval = o
|
||||||
elif PyLong_CheckExact(o) if strict_types else PyLong_Check(o):
|
msgpack_pack_long_long(&self.pk, llval)
|
||||||
# PyInt_Check(long) is True for Python 3.
|
except OverflowError as oe:
|
||||||
# So we should test long before int.
|
if will_default:
|
||||||
try:
|
return -2
|
||||||
if o > 0:
|
|
||||||
ullval = o
|
|
||||||
ret = msgpack_pack_unsigned_long_long(&self.pk, ullval)
|
|
||||||
else:
|
|
||||||
llval = o
|
|
||||||
ret = msgpack_pack_long_long(&self.pk, llval)
|
|
||||||
except OverflowError as oe:
|
|
||||||
if not default_used and self._default is not None:
|
|
||||||
o = self._default(o)
|
|
||||||
default_used = True
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
raise PackOverflowError("Integer value out of range")
|
|
||||||
elif PyInt_CheckExact(o) if strict_types else PyInt_Check(o):
|
|
||||||
longval = o
|
|
||||||
ret = msgpack_pack_long(&self.pk, longval)
|
|
||||||
elif PyFloat_CheckExact(o) if strict_types else PyFloat_Check(o):
|
|
||||||
if self.use_float:
|
|
||||||
fval = o
|
|
||||||
ret = msgpack_pack_float(&self.pk, fval)
|
|
||||||
else:
|
else:
|
||||||
dval = o
|
raise OverflowError("Integer value out of range")
|
||||||
ret = msgpack_pack_double(&self.pk, dval)
|
elif PyFloat_CheckExact(o) if strict else PyFloat_Check(o):
|
||||||
elif PyBytesLike_CheckExact(o) if strict_types else PyBytesLike_Check(o):
|
if self.use_float:
|
||||||
L = len(o)
|
msgpack_pack_float(&self.pk, <float>o)
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("%s is too large" % type(o).__name__)
|
|
||||||
rawval = o
|
|
||||||
ret = msgpack_pack_bin(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
ret = msgpack_pack_raw_body(&self.pk, rawval, L)
|
|
||||||
elif PyUnicode_CheckExact(o) if strict_types else PyUnicode_Check(o):
|
|
||||||
if self.encoding == NULL and self.unicode_errors == NULL:
|
|
||||||
ret = msgpack_pack_unicode(&self.pk, o, ITEM_LIMIT);
|
|
||||||
if ret == -2:
|
|
||||||
raise PackValueError("unicode string is too large")
|
|
||||||
else:
|
|
||||||
o = PyUnicode_AsEncodedString(o, self.encoding, self.unicode_errors)
|
|
||||||
L = len(o)
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("unicode string is too large")
|
|
||||||
ret = msgpack_pack_raw(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
rawval = o
|
|
||||||
ret = msgpack_pack_raw_body(&self.pk, rawval, L)
|
|
||||||
elif PyDict_CheckExact(o):
|
|
||||||
d = <dict>o
|
|
||||||
L = len(d)
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("dict is too large")
|
|
||||||
ret = msgpack_pack_map(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
for k, v in d.iteritems():
|
|
||||||
ret = self._pack(k, nest_limit-1)
|
|
||||||
if ret != 0: break
|
|
||||||
ret = self._pack(v, nest_limit-1)
|
|
||||||
if ret != 0: break
|
|
||||||
elif not strict_types and PyDict_Check(o):
|
|
||||||
L = len(o)
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("dict is too large")
|
|
||||||
ret = msgpack_pack_map(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
for k, v in o.items():
|
|
||||||
ret = self._pack(k, nest_limit-1)
|
|
||||||
if ret != 0: break
|
|
||||||
ret = self._pack(v, nest_limit-1)
|
|
||||||
if ret != 0: break
|
|
||||||
elif type(o) is ExtType if strict_types else isinstance(o, ExtType):
|
|
||||||
# This should be before Tuple because ExtType is namedtuple.
|
|
||||||
longval = o.code
|
|
||||||
rawval = o.data
|
|
||||||
L = len(o.data)
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("EXT data is too large")
|
|
||||||
ret = msgpack_pack_ext(&self.pk, longval, L)
|
|
||||||
ret = msgpack_pack_raw_body(&self.pk, rawval, L)
|
|
||||||
elif PyList_CheckExact(o) if strict_types else (PyTuple_Check(o) or PyList_Check(o)):
|
|
||||||
L = len(o)
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
raise PackValueError("list is too large")
|
|
||||||
ret = msgpack_pack_array(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
for v in o:
|
|
||||||
ret = self._pack(v, nest_limit-1)
|
|
||||||
if ret != 0: break
|
|
||||||
elif PyMemoryView_Check(o):
|
|
||||||
if PyObject_GetBuffer(o, &view, PyBUF_SIMPLE) != 0:
|
|
||||||
raise PackValueError("could not get buffer for memoryview")
|
|
||||||
L = view.len
|
|
||||||
if L > ITEM_LIMIT:
|
|
||||||
PyBuffer_Release(&view);
|
|
||||||
raise PackValueError("memoryview is too large")
|
|
||||||
ret = msgpack_pack_bin(&self.pk, L)
|
|
||||||
if ret == 0:
|
|
||||||
ret = msgpack_pack_raw_body(&self.pk, <char*>view.buf, L)
|
|
||||||
PyBuffer_Release(&view);
|
|
||||||
elif not default_used and self._default:
|
|
||||||
o = self._default(o)
|
|
||||||
default_used = 1
|
|
||||||
continue
|
|
||||||
else:
|
else:
|
||||||
raise TypeError("can't serialize %r" % (o,))
|
msgpack_pack_double(&self.pk, <double>o)
|
||||||
return ret
|
elif PyBytesLike_CheckExact(o) if strict else PyBytesLike_Check(o):
|
||||||
|
L = Py_SIZE(o)
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
PyErr_Format(ValueError, b"%.200s object is too large", Py_TYPE(o).tp_name)
|
||||||
|
rawval = o
|
||||||
|
msgpack_pack_bin(&self.pk, L)
|
||||||
|
msgpack_pack_raw_body(&self.pk, rawval, L)
|
||||||
|
elif PyUnicode_CheckExact(o) if strict else PyUnicode_Check(o):
|
||||||
|
if self.unicode_errors == NULL:
|
||||||
|
rawval = PyUnicode_AsUTF8AndSize(o, &L)
|
||||||
|
if L >ITEM_LIMIT:
|
||||||
|
raise ValueError("unicode string is too large")
|
||||||
|
else:
|
||||||
|
o = PyUnicode_AsEncodedString(o, NULL, self.unicode_errors)
|
||||||
|
L = Py_SIZE(o)
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
raise ValueError("unicode string is too large")
|
||||||
|
rawval = o
|
||||||
|
msgpack_pack_raw(&self.pk, L)
|
||||||
|
msgpack_pack_raw_body(&self.pk, rawval, L)
|
||||||
|
elif PyDict_CheckExact(o) if strict else PyDict_Check(o):
|
||||||
|
L = len(o)
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
raise ValueError("dict is too large")
|
||||||
|
msgpack_pack_map(&self.pk, L)
|
||||||
|
for k, v in o.items():
|
||||||
|
self._pack(k, nest_limit)
|
||||||
|
self._pack(v, nest_limit)
|
||||||
|
elif type(o) is ExtType if strict else isinstance(o, ExtType):
|
||||||
|
# This should be before Tuple because ExtType is namedtuple.
|
||||||
|
rawval = o.data
|
||||||
|
L = len(o.data)
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
raise ValueError("EXT data is too large")
|
||||||
|
msgpack_pack_ext(&self.pk, <long>o.code, L)
|
||||||
|
msgpack_pack_raw_body(&self.pk, rawval, L)
|
||||||
|
elif type(o) is Timestamp:
|
||||||
|
llval = o.seconds
|
||||||
|
ulval = o.nanoseconds
|
||||||
|
msgpack_pack_timestamp(&self.pk, llval, ulval)
|
||||||
|
elif PyList_CheckExact(o) if strict else (PyTuple_Check(o) or PyList_Check(o)):
|
||||||
|
L = Py_SIZE(o)
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
raise ValueError("list is too large")
|
||||||
|
msgpack_pack_array(&self.pk, L)
|
||||||
|
for v in o:
|
||||||
|
self._pack(v, nest_limit)
|
||||||
|
elif PyMemoryView_Check(o):
|
||||||
|
PyObject_GetBuffer(o, &view, PyBUF_SIMPLE)
|
||||||
|
L = view.len
|
||||||
|
if L > ITEM_LIMIT:
|
||||||
|
PyBuffer_Release(&view);
|
||||||
|
raise ValueError("memoryview is too large")
|
||||||
|
try:
|
||||||
|
msgpack_pack_bin(&self.pk, L)
|
||||||
|
msgpack_pack_raw_body(&self.pk, <char*>view.buf, L)
|
||||||
|
finally:
|
||||||
|
PyBuffer_Release(&view);
|
||||||
|
elif self.datetime and PyDateTime_CheckExact(o) and datetime_tzinfo(o) is not None:
|
||||||
|
delta = o - epoch
|
||||||
|
if not PyDelta_CheckExact(delta):
|
||||||
|
raise ValueError("failed to calculate delta")
|
||||||
|
llval = timedelta_days(delta) * <long long>(24*60*60) + timedelta_seconds(delta)
|
||||||
|
ulval = timedelta_microseconds(delta) * 1000
|
||||||
|
msgpack_pack_timestamp(&self.pk, llval, ulval)
|
||||||
|
elif will_default:
|
||||||
|
return -2
|
||||||
|
elif self.datetime and PyDateTime_CheckExact(o):
|
||||||
|
# this should be later than will_default
|
||||||
|
PyErr_Format(ValueError, b"can not serialize '%.200s' object where tzinfo=None", Py_TYPE(o).tp_name)
|
||||||
|
else:
|
||||||
|
PyErr_Format(TypeError, b"can not serialize '%.200s' object", Py_TYPE(o).tp_name)
|
||||||
|
|
||||||
cpdef pack(self, object obj):
|
cdef int _pack(self, object o, int nest_limit=DEFAULT_RECURSE_LIMIT) except -1:
|
||||||
cdef int ret
|
cdef int ret
|
||||||
|
if nest_limit < 0:
|
||||||
|
raise ValueError("recursion limit exceeded.")
|
||||||
|
nest_limit -= 1
|
||||||
|
if self._default is not None:
|
||||||
|
ret = self._pack_inner(o, 1, nest_limit)
|
||||||
|
if ret == -2:
|
||||||
|
o = self._default(o)
|
||||||
|
else:
|
||||||
|
return ret
|
||||||
|
return self._pack_inner(o, 0, nest_limit)
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
|
def pack(self, object obj):
|
||||||
|
cdef int ret
|
||||||
|
self._check_exports()
|
||||||
try:
|
try:
|
||||||
ret = self._pack(obj, DEFAULT_RECURSE_LIMIT)
|
ret = self._pack(obj, DEFAULT_RECURSE_LIMIT)
|
||||||
except:
|
except:
|
||||||
|
|
@ -295,36 +284,37 @@ cdef class Packer(object):
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
return buf
|
return buf
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def pack_ext_type(self, typecode, data):
|
def pack_ext_type(self, typecode, data):
|
||||||
|
self._check_exports()
|
||||||
|
if len(data) > ITEM_LIMIT:
|
||||||
|
raise ValueError("ext data too large")
|
||||||
msgpack_pack_ext(&self.pk, typecode, len(data))
|
msgpack_pack_ext(&self.pk, typecode, len(data))
|
||||||
msgpack_pack_raw_body(&self.pk, data, len(data))
|
msgpack_pack_raw_body(&self.pk, data, len(data))
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def pack_array_header(self, long long size):
|
def pack_array_header(self, long long size):
|
||||||
|
self._check_exports()
|
||||||
if size > ITEM_LIMIT:
|
if size > ITEM_LIMIT:
|
||||||
raise PackValueError
|
raise ValueError("array too large")
|
||||||
cdef int ret = msgpack_pack_array(&self.pk, size)
|
msgpack_pack_array(&self.pk, size)
|
||||||
if ret == -1:
|
|
||||||
raise MemoryError
|
|
||||||
elif ret: # should not happen
|
|
||||||
raise TypeError
|
|
||||||
if self.autoreset:
|
if self.autoreset:
|
||||||
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
return buf
|
return buf
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def pack_map_header(self, long long size):
|
def pack_map_header(self, long long size):
|
||||||
|
self._check_exports()
|
||||||
if size > ITEM_LIMIT:
|
if size > ITEM_LIMIT:
|
||||||
raise PackValueError
|
raise ValueError("map too learge")
|
||||||
cdef int ret = msgpack_pack_map(&self.pk, size)
|
msgpack_pack_map(&self.pk, size)
|
||||||
if ret == -1:
|
|
||||||
raise MemoryError
|
|
||||||
elif ret: # should not happen
|
|
||||||
raise TypeError
|
|
||||||
if self.autoreset:
|
if self.autoreset:
|
||||||
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
return buf
|
return buf
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def pack_map_pairs(self, object pairs):
|
def pack_map_pairs(self, object pairs):
|
||||||
"""
|
"""
|
||||||
Pack *pairs* as msgpack map type.
|
Pack *pairs* as msgpack map type.
|
||||||
|
|
@ -332,26 +322,43 @@ cdef class Packer(object):
|
||||||
*pairs* should be a sequence of pairs.
|
*pairs* should be a sequence of pairs.
|
||||||
(`len(pairs)` and `for k, v in pairs:` should be supported.)
|
(`len(pairs)` and `for k, v in pairs:` should be supported.)
|
||||||
"""
|
"""
|
||||||
cdef int ret = msgpack_pack_map(&self.pk, len(pairs))
|
self._check_exports()
|
||||||
if ret == 0:
|
size = len(pairs)
|
||||||
for k, v in pairs:
|
if size > ITEM_LIMIT:
|
||||||
ret = self._pack(k)
|
raise ValueError("map too large")
|
||||||
if ret != 0: break
|
msgpack_pack_map(&self.pk, size)
|
||||||
ret = self._pack(v)
|
for k, v in pairs:
|
||||||
if ret != 0: break
|
self._pack(k)
|
||||||
if ret == -1:
|
self._pack(v)
|
||||||
raise MemoryError
|
|
||||||
elif ret: # should not happen
|
|
||||||
raise TypeError
|
|
||||||
if self.autoreset:
|
if self.autoreset:
|
||||||
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
buf = PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
return buf
|
return buf
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def reset(self):
|
def reset(self):
|
||||||
"""Clear internal buffer."""
|
"""Reset internal buffer.
|
||||||
|
|
||||||
|
This method is useful only when autoreset=False.
|
||||||
|
"""
|
||||||
|
self._check_exports()
|
||||||
self.pk.length = 0
|
self.pk.length = 0
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def bytes(self):
|
def bytes(self):
|
||||||
"""Return buffer content."""
|
"""Return internal buffer contents as bytes object"""
|
||||||
return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
return PyBytes_FromStringAndSize(self.pk.buf, self.pk.length)
|
||||||
|
|
||||||
|
def getbuffer(self):
|
||||||
|
"""Return memoryview of internal buffer.
|
||||||
|
|
||||||
|
Note: Packer now supports buffer protocol. You can use memoryview(packer).
|
||||||
|
"""
|
||||||
|
return memoryview(self)
|
||||||
|
|
||||||
|
def __getbuffer__(self, Py_buffer *buffer, int flags):
|
||||||
|
PyBuffer_FillInfo(buffer, self, self.pk.buf, self.pk.length, 1, flags)
|
||||||
|
self.exports += 1
|
||||||
|
|
||||||
|
def __releasebuffer__(self, Py_buffer *buffer):
|
||||||
|
self.exports -= 1
|
||||||
|
|
|
||||||
|
|
@ -1,44 +1,23 @@
|
||||||
# coding: utf-8
|
from cpython cimport *
|
||||||
#cython: embedsignature=True, c_string_encoding=ascii
|
|
||||||
|
|
||||||
from cpython.version cimport PY_MAJOR_VERSION
|
|
||||||
from cpython.bytes cimport (
|
|
||||||
PyBytes_AsString,
|
|
||||||
PyBytes_FromStringAndSize,
|
|
||||||
PyBytes_Size,
|
|
||||||
)
|
|
||||||
from cpython.buffer cimport (
|
|
||||||
Py_buffer,
|
|
||||||
PyObject_CheckBuffer,
|
|
||||||
PyObject_GetBuffer,
|
|
||||||
PyBuffer_Release,
|
|
||||||
PyBuffer_IsContiguous,
|
|
||||||
PyBUF_READ,
|
|
||||||
PyBUF_SIMPLE,
|
|
||||||
PyBUF_FULL_RO,
|
|
||||||
)
|
|
||||||
from cpython.mem cimport PyMem_Malloc, PyMem_Free
|
|
||||||
from cpython.object cimport PyCallable_Check
|
|
||||||
from cpython.ref cimport Py_DECREF
|
|
||||||
from cpython.exc cimport PyErr_WarnEx
|
|
||||||
|
|
||||||
cdef extern from "Python.h":
|
cdef extern from "Python.h":
|
||||||
ctypedef struct PyObject
|
ctypedef struct PyObject
|
||||||
cdef int PyObject_AsReadBuffer(object o, const void** buff, Py_ssize_t* buf_len) except -1
|
|
||||||
object PyMemoryView_GetContiguous(object obj, int buffertype, char order)
|
object PyMemoryView_GetContiguous(object obj, int buffertype, char order)
|
||||||
|
|
||||||
from libc.stdlib cimport *
|
from libc.stdlib cimport *
|
||||||
from libc.string cimport *
|
from libc.string cimport *
|
||||||
from libc.limits cimport *
|
from libc.limits cimport *
|
||||||
ctypedef unsigned long long uint64_t
|
from libc.stdint cimport uint64_t
|
||||||
|
|
||||||
from msgpack.exceptions import (
|
from .exceptions import (
|
||||||
BufferFull,
|
BufferFull,
|
||||||
OutOfData,
|
OutOfData,
|
||||||
UnpackValueError,
|
|
||||||
ExtraData,
|
ExtraData,
|
||||||
|
FormatError,
|
||||||
|
StackError,
|
||||||
)
|
)
|
||||||
from msgpack import ExtType
|
from .ext import ExtType, Timestamp
|
||||||
|
|
||||||
|
cdef object giga = 1_000_000_000
|
||||||
|
|
||||||
|
|
||||||
cdef extern from "unpack.h":
|
cdef extern from "unpack.h":
|
||||||
|
|
@ -46,11 +25,15 @@ cdef extern from "unpack.h":
|
||||||
bint use_list
|
bint use_list
|
||||||
bint raw
|
bint raw
|
||||||
bint has_pairs_hook # call object_hook with k-v pairs
|
bint has_pairs_hook # call object_hook with k-v pairs
|
||||||
|
bint strict_map_key
|
||||||
|
int timestamp
|
||||||
PyObject* object_hook
|
PyObject* object_hook
|
||||||
PyObject* list_hook
|
PyObject* list_hook
|
||||||
PyObject* ext_hook
|
PyObject* ext_hook
|
||||||
char *encoding
|
PyObject* timestamp_t
|
||||||
char *unicode_errors
|
PyObject *giga;
|
||||||
|
PyObject *utc;
|
||||||
|
const char *unicode_errors
|
||||||
Py_ssize_t max_str_len
|
Py_ssize_t max_str_len
|
||||||
Py_ssize_t max_bin_len
|
Py_ssize_t max_bin_len
|
||||||
Py_ssize_t max_array_len
|
Py_ssize_t max_array_len
|
||||||
|
|
@ -75,14 +58,16 @@ cdef extern from "unpack.h":
|
||||||
cdef inline init_ctx(unpack_context *ctx,
|
cdef inline init_ctx(unpack_context *ctx,
|
||||||
object object_hook, object object_pairs_hook,
|
object object_hook, object object_pairs_hook,
|
||||||
object list_hook, object ext_hook,
|
object list_hook, object ext_hook,
|
||||||
bint use_list, bint raw,
|
bint use_list, bint raw, int timestamp,
|
||||||
const char* encoding, const char* unicode_errors,
|
bint strict_map_key,
|
||||||
|
const char* unicode_errors,
|
||||||
Py_ssize_t max_str_len, Py_ssize_t max_bin_len,
|
Py_ssize_t max_str_len, Py_ssize_t max_bin_len,
|
||||||
Py_ssize_t max_array_len, Py_ssize_t max_map_len,
|
Py_ssize_t max_array_len, Py_ssize_t max_map_len,
|
||||||
Py_ssize_t max_ext_len):
|
Py_ssize_t max_ext_len):
|
||||||
unpack_init(ctx)
|
unpack_init(ctx)
|
||||||
ctx.user.use_list = use_list
|
ctx.user.use_list = use_list
|
||||||
ctx.user.raw = raw
|
ctx.user.raw = raw
|
||||||
|
ctx.user.strict_map_key = strict_map_key
|
||||||
ctx.user.object_hook = ctx.user.list_hook = <PyObject*>NULL
|
ctx.user.object_hook = ctx.user.list_hook = <PyObject*>NULL
|
||||||
ctx.user.max_str_len = max_str_len
|
ctx.user.max_str_len = max_str_len
|
||||||
ctx.user.max_bin_len = max_bin_len
|
ctx.user.max_bin_len = max_bin_len
|
||||||
|
|
@ -116,7 +101,14 @@ cdef inline init_ctx(unpack_context *ctx,
|
||||||
raise TypeError("ext_hook must be a callable.")
|
raise TypeError("ext_hook must be a callable.")
|
||||||
ctx.user.ext_hook = <PyObject*>ext_hook
|
ctx.user.ext_hook = <PyObject*>ext_hook
|
||||||
|
|
||||||
ctx.user.encoding = encoding
|
if timestamp < 0 or 3 < timestamp:
|
||||||
|
raise ValueError("timestamp must be 0..3")
|
||||||
|
|
||||||
|
# Add Timestamp type to the user object so it may be used in unpack.h
|
||||||
|
ctx.user.timestamp = timestamp
|
||||||
|
ctx.user.timestamp_t = <PyObject*>Timestamp
|
||||||
|
ctx.user.giga = <PyObject*>giga
|
||||||
|
ctx.user.utc = <PyObject*>utc
|
||||||
ctx.user.unicode_errors = unicode_errors
|
ctx.user.unicode_errors = unicode_errors
|
||||||
|
|
||||||
def default_read_extended_type(typecode, data):
|
def default_read_extended_type(typecode, data):
|
||||||
|
|
@ -125,54 +117,48 @@ def default_read_extended_type(typecode, data):
|
||||||
cdef inline int get_data_from_buffer(object obj,
|
cdef inline int get_data_from_buffer(object obj,
|
||||||
Py_buffer *view,
|
Py_buffer *view,
|
||||||
char **buf,
|
char **buf,
|
||||||
Py_ssize_t *buffer_len,
|
Py_ssize_t *buffer_len) except 0:
|
||||||
int *new_protocol) except 0:
|
|
||||||
cdef object contiguous
|
cdef object contiguous
|
||||||
cdef Py_buffer tmp
|
cdef Py_buffer tmp
|
||||||
if PyObject_CheckBuffer(obj):
|
if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1:
|
||||||
new_protocol[0] = 1
|
raise
|
||||||
if PyObject_GetBuffer(obj, view, PyBUF_FULL_RO) == -1:
|
if view.itemsize != 1:
|
||||||
raise
|
PyBuffer_Release(view)
|
||||||
if view.itemsize != 1:
|
raise BufferError("cannot unpack from multi-byte object")
|
||||||
PyBuffer_Release(view)
|
if PyBuffer_IsContiguous(view, b'A') == 0:
|
||||||
raise BufferError("cannot unpack from multi-byte object")
|
PyBuffer_Release(view)
|
||||||
if PyBuffer_IsContiguous(view, 'A') == 0:
|
# create a contiguous copy and get buffer
|
||||||
PyBuffer_Release(view)
|
contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, b'C')
|
||||||
# create a contiguous copy and get buffer
|
PyObject_GetBuffer(contiguous, view, PyBUF_SIMPLE)
|
||||||
contiguous = PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C')
|
# view must hold the only reference to contiguous,
|
||||||
PyObject_GetBuffer(contiguous, view, PyBUF_SIMPLE)
|
# so memory is freed when view is released
|
||||||
# view must hold the only reference to contiguous,
|
Py_DECREF(contiguous)
|
||||||
# so memory is freed when view is released
|
buffer_len[0] = view.len
|
||||||
Py_DECREF(contiguous)
|
buf[0] = <char*> view.buf
|
||||||
buffer_len[0] = view.len
|
return 1
|
||||||
buf[0] = <char*> view.buf
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
new_protocol[0] = 0
|
|
||||||
if PyObject_AsReadBuffer(obj, <const void**> buf, buffer_len) == -1:
|
|
||||||
raise BufferError("could not get memoryview")
|
|
||||||
PyErr_WarnEx(RuntimeWarning,
|
|
||||||
"using old buffer interface to unpack %s; "
|
|
||||||
"this leads to unpacking errors if slicing is used and "
|
|
||||||
"will be removed in a future version" % type(obj),
|
|
||||||
1)
|
|
||||||
return 1
|
|
||||||
|
|
||||||
def unpackb(object packed, object object_hook=None, object list_hook=None,
|
|
||||||
bint use_list=True, bint raw=True,
|
def unpackb(object packed, *, object object_hook=None, object list_hook=None,
|
||||||
encoding=None, unicode_errors=None,
|
bint use_list=True, bint raw=False, int timestamp=0, bint strict_map_key=True,
|
||||||
|
unicode_errors=None,
|
||||||
object_pairs_hook=None, ext_hook=ExtType,
|
object_pairs_hook=None, ext_hook=ExtType,
|
||||||
Py_ssize_t max_str_len=2147483647, # 2**32-1
|
Py_ssize_t max_str_len=-1,
|
||||||
Py_ssize_t max_bin_len=2147483647,
|
Py_ssize_t max_bin_len=-1,
|
||||||
Py_ssize_t max_array_len=2147483647,
|
Py_ssize_t max_array_len=-1,
|
||||||
Py_ssize_t max_map_len=2147483647,
|
Py_ssize_t max_map_len=-1,
|
||||||
Py_ssize_t max_ext_len=2147483647):
|
Py_ssize_t max_ext_len=-1):
|
||||||
"""
|
"""
|
||||||
Unpack packed_bytes to object. Returns an unpacked object.
|
Unpack packed_bytes to object. Returns an unpacked object.
|
||||||
|
|
||||||
Raises `ValueError` when `packed` contains extra bytes.
|
Raises ``ExtraData`` when *packed* contains extra bytes.
|
||||||
|
Raises ``ValueError`` when *packed* is incomplete.
|
||||||
|
Raises ``FormatError`` when *packed* is not valid msgpack.
|
||||||
|
Raises ``StackError`` when *packed* contains too nested.
|
||||||
|
Other exceptions can be raised during unpacking.
|
||||||
|
|
||||||
See :class:`Unpacker` for options.
|
See :class:`Unpacker` for options.
|
||||||
|
|
||||||
|
*max_xxx_len* options are configured automatically from ``len(packed)``.
|
||||||
"""
|
"""
|
||||||
cdef unpack_context ctx
|
cdef unpack_context ctx
|
||||||
cdef Py_ssize_t off = 0
|
cdef Py_ssize_t off = 0
|
||||||
|
|
@ -181,26 +167,31 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
|
||||||
cdef Py_buffer view
|
cdef Py_buffer view
|
||||||
cdef char* buf = NULL
|
cdef char* buf = NULL
|
||||||
cdef Py_ssize_t buf_len
|
cdef Py_ssize_t buf_len
|
||||||
cdef const char* cenc = NULL
|
|
||||||
cdef const char* cerr = NULL
|
cdef const char* cerr = NULL
|
||||||
cdef int new_protocol = 0
|
|
||||||
|
|
||||||
if encoding is not None:
|
|
||||||
PyErr_WarnEx(PendingDeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1)
|
|
||||||
cenc = encoding
|
|
||||||
|
|
||||||
if unicode_errors is not None:
|
if unicode_errors is not None:
|
||||||
cerr = unicode_errors
|
cerr = unicode_errors
|
||||||
|
|
||||||
get_data_from_buffer(packed, &view, &buf, &buf_len, &new_protocol)
|
get_data_from_buffer(packed, &view, &buf, &buf_len)
|
||||||
|
|
||||||
|
if max_str_len == -1:
|
||||||
|
max_str_len = buf_len
|
||||||
|
if max_bin_len == -1:
|
||||||
|
max_bin_len = buf_len
|
||||||
|
if max_array_len == -1:
|
||||||
|
max_array_len = buf_len
|
||||||
|
if max_map_len == -1:
|
||||||
|
max_map_len = buf_len//2
|
||||||
|
if max_ext_len == -1:
|
||||||
|
max_ext_len = buf_len
|
||||||
|
|
||||||
try:
|
try:
|
||||||
init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook,
|
init_ctx(&ctx, object_hook, object_pairs_hook, list_hook, ext_hook,
|
||||||
use_list, raw, cenc, cerr,
|
use_list, raw, timestamp, strict_map_key, cerr,
|
||||||
max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len)
|
max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len)
|
||||||
ret = unpack_construct(&ctx, buf, buf_len, &off)
|
ret = unpack_construct(&ctx, buf, buf_len, &off)
|
||||||
finally:
|
finally:
|
||||||
if new_protocol:
|
PyBuffer_Release(&view);
|
||||||
PyBuffer_Release(&view);
|
|
||||||
|
|
||||||
if ret == 1:
|
if ret == 1:
|
||||||
obj = unpack_data(&ctx)
|
obj = unpack_data(&ctx)
|
||||||
|
|
@ -208,88 +199,96 @@ def unpackb(object packed, object object_hook=None, object list_hook=None,
|
||||||
raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off))
|
raise ExtraData(obj, PyBytes_FromStringAndSize(buf+off, buf_len-off))
|
||||||
return obj
|
return obj
|
||||||
unpack_clear(&ctx)
|
unpack_clear(&ctx)
|
||||||
raise UnpackValueError("Unpack failed: error = %d" % (ret,))
|
if ret == 0:
|
||||||
|
raise ValueError("Unpack failed: incomplete input")
|
||||||
|
elif ret == -2:
|
||||||
|
raise FormatError
|
||||||
|
elif ret == -3:
|
||||||
|
raise StackError
|
||||||
|
raise ValueError("Unpack failed: error = %d" % (ret,))
|
||||||
|
|
||||||
|
|
||||||
def unpack(object stream, **kwargs):
|
cdef class Unpacker:
|
||||||
PyErr_WarnEx(
|
|
||||||
PendingDeprecationWarning,
|
|
||||||
"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", 1)
|
|
||||||
data = stream.read()
|
|
||||||
return unpackb(data, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
cdef class Unpacker(object):
|
|
||||||
"""Streaming unpacker.
|
"""Streaming unpacker.
|
||||||
|
|
||||||
arguments:
|
Arguments:
|
||||||
|
|
||||||
:param file_like:
|
:param file_like:
|
||||||
File-like object having `.read(n)` method.
|
File-like object having `.read(n)` method.
|
||||||
If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable.
|
If specified, unpacker reads serialized data from it and `.feed()` is not usable.
|
||||||
|
|
||||||
:param int read_size:
|
:param int read_size:
|
||||||
Used as `file_like.read(read_size)`. (default: `min(1024**2, max_buffer_size)`)
|
Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)
|
||||||
|
|
||||||
:param bool use_list:
|
:param bool use_list:
|
||||||
If true, unpack msgpack array to Python list.
|
If true, unpack msgpack array to Python list.
|
||||||
Otherwise, unpack to Python tuple. (default: True)
|
Otherwise, unpack to Python tuple. (default: True)
|
||||||
|
|
||||||
:param bool raw:
|
:param bool raw:
|
||||||
If true, unpack msgpack raw to Python bytes (default).
|
If true, unpack msgpack raw to Python bytes.
|
||||||
Otherwise, unpack to Python str (or unicode on Python 2) by decoding
|
Otherwise, unpack to Python str by decoding with UTF-8 encoding (default).
|
||||||
with UTF-8 encoding (recommended).
|
|
||||||
Currently, the default is true, but it will be changed to false in
|
|
||||||
near future. So you must specify it explicitly for keeping backward
|
|
||||||
compatibility.
|
|
||||||
|
|
||||||
*encoding* option which is deprecated overrides this option.
|
:param int timestamp:
|
||||||
|
Control how timestamp type is unpacked:
|
||||||
|
|
||||||
:param callable object_hook:
|
0 - Timestamp
|
||||||
|
1 - float (Seconds from the EPOCH)
|
||||||
|
2 - int (Nanoseconds from the EPOCH)
|
||||||
|
3 - datetime.datetime (UTC).
|
||||||
|
|
||||||
|
:param bool strict_map_key:
|
||||||
|
If true (default), only str or bytes are accepted for map (dict) keys.
|
||||||
|
|
||||||
|
:param object_hook:
|
||||||
When specified, it should be callable.
|
When specified, it should be callable.
|
||||||
Unpacker calls it with a dict argument after unpacking msgpack map.
|
Unpacker calls it with a dict argument after unpacking msgpack map.
|
||||||
(See also simplejson)
|
(See also simplejson)
|
||||||
|
|
||||||
:param callable object_pairs_hook:
|
:param object_pairs_hook:
|
||||||
When specified, it should be callable.
|
When specified, it should be callable.
|
||||||
Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
|
Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
|
||||||
(See also simplejson)
|
(See also simplejson)
|
||||||
|
|
||||||
|
:param str unicode_errors:
|
||||||
|
The error handler for decoding unicode. (default: 'strict')
|
||||||
|
This option should be used only when you have msgpack data which
|
||||||
|
contains invalid UTF-8 string.
|
||||||
|
|
||||||
:param int max_buffer_size:
|
:param int max_buffer_size:
|
||||||
Limits size of data waiting unpacked. 0 means system's INT_MAX (default).
|
Limits size of data waiting unpacked. 0 means 2**32-1.
|
||||||
|
The default value is 100*1024*1024 (100MiB).
|
||||||
Raises `BufferFull` exception when it is insufficient.
|
Raises `BufferFull` exception when it is insufficient.
|
||||||
You should set this parameter when unpacking data from untrusted source.
|
You should set this parameter when unpacking data from untrusted source.
|
||||||
|
|
||||||
:param int max_str_len:
|
:param int max_str_len:
|
||||||
Limits max length of str. (default: 2**31-1)
|
Deprecated, use *max_buffer_size* instead.
|
||||||
|
Limits max length of str. (default: max_buffer_size)
|
||||||
|
|
||||||
:param int max_bin_len:
|
:param int max_bin_len:
|
||||||
Limits max length of bin. (default: 2**31-1)
|
Deprecated, use *max_buffer_size* instead.
|
||||||
|
Limits max length of bin. (default: max_buffer_size)
|
||||||
|
|
||||||
:param int max_array_len:
|
:param int max_array_len:
|
||||||
Limits max length of array. (default: 2**31-1)
|
Limits max length of array.
|
||||||
|
(default: max_buffer_size)
|
||||||
|
|
||||||
:param int max_map_len:
|
:param int max_map_len:
|
||||||
Limits max length of map. (default: 2**31-1)
|
Limits max length of map.
|
||||||
|
(default: max_buffer_size//2)
|
||||||
:param str encoding:
|
|
||||||
Deprecated, use raw instead.
|
|
||||||
Encoding used for decoding msgpack raw.
|
|
||||||
If it is None (default), msgpack raw is deserialized to Python bytes.
|
|
||||||
|
|
||||||
:param str unicode_errors:
|
|
||||||
Error handler used for decoding str type. (default: `'strict'`)
|
|
||||||
|
|
||||||
|
:param int max_ext_len:
|
||||||
|
Deprecated, use *max_buffer_size* instead.
|
||||||
|
Limits max size of ext type. (default: max_buffer_size)
|
||||||
|
|
||||||
Example of streaming deserialize from file-like object::
|
Example of streaming deserialize from file-like object::
|
||||||
|
|
||||||
unpacker = Unpacker(file_like, raw=False)
|
unpacker = Unpacker(file_like)
|
||||||
for o in unpacker:
|
for o in unpacker:
|
||||||
process(o)
|
process(o)
|
||||||
|
|
||||||
Example of streaming deserialize from socket::
|
Example of streaming deserialize from socket::
|
||||||
|
|
||||||
unpacker = Unpacker(raw=False)
|
unpacker = Unpacker()
|
||||||
while True:
|
while True:
|
||||||
buf = sock.recv(1024**2)
|
buf = sock.recv(1024**2)
|
||||||
if not buf:
|
if not buf:
|
||||||
|
|
@ -297,6 +296,12 @@ cdef class Unpacker(object):
|
||||||
unpacker.feed(buf)
|
unpacker.feed(buf)
|
||||||
for o in unpacker:
|
for o in unpacker:
|
||||||
process(o)
|
process(o)
|
||||||
|
|
||||||
|
Raises ``ExtraData`` when *packed* contains extra bytes.
|
||||||
|
Raises ``OutOfData`` when *packed* is incomplete.
|
||||||
|
Raises ``FormatError`` when *packed* is not valid msgpack.
|
||||||
|
Raises ``StackError`` when *packed* contains too nested.
|
||||||
|
Other exceptions can be raised during unpacking.
|
||||||
"""
|
"""
|
||||||
cdef unpack_context ctx
|
cdef unpack_context ctx
|
||||||
cdef char* buf
|
cdef char* buf
|
||||||
|
|
@ -306,7 +311,7 @@ cdef class Unpacker(object):
|
||||||
cdef Py_ssize_t read_size
|
cdef Py_ssize_t read_size
|
||||||
# To maintain refcnt.
|
# To maintain refcnt.
|
||||||
cdef object object_hook, object_pairs_hook, list_hook, ext_hook
|
cdef object object_hook, object_pairs_hook, list_hook, ext_hook
|
||||||
cdef object encoding, unicode_errors
|
cdef object unicode_errors
|
||||||
cdef Py_ssize_t max_buffer_size
|
cdef Py_ssize_t max_buffer_size
|
||||||
cdef uint64_t stream_offset
|
cdef uint64_t stream_offset
|
||||||
|
|
||||||
|
|
@ -317,17 +322,17 @@ cdef class Unpacker(object):
|
||||||
PyMem_Free(self.buf)
|
PyMem_Free(self.buf)
|
||||||
self.buf = NULL
|
self.buf = NULL
|
||||||
|
|
||||||
def __init__(self, file_like=None, Py_ssize_t read_size=0,
|
@cython.critical_section
|
||||||
bint use_list=True, bint raw=True,
|
def __init__(self, file_like=None, *, Py_ssize_t read_size=0,
|
||||||
|
bint use_list=True, bint raw=False, int timestamp=0, bint strict_map_key=True,
|
||||||
object object_hook=None, object object_pairs_hook=None, object list_hook=None,
|
object object_hook=None, object object_pairs_hook=None, object list_hook=None,
|
||||||
encoding=None, unicode_errors=None, int max_buffer_size=0,
|
unicode_errors=None, Py_ssize_t max_buffer_size=100*1024*1024,
|
||||||
object ext_hook=ExtType,
|
object ext_hook=ExtType,
|
||||||
Py_ssize_t max_str_len=2147483647, # 2**32-1
|
Py_ssize_t max_str_len=-1,
|
||||||
Py_ssize_t max_bin_len=2147483647,
|
Py_ssize_t max_bin_len=-1,
|
||||||
Py_ssize_t max_array_len=2147483647,
|
Py_ssize_t max_array_len=-1,
|
||||||
Py_ssize_t max_map_len=2147483647,
|
Py_ssize_t max_map_len=-1,
|
||||||
Py_ssize_t max_ext_len=2147483647):
|
Py_ssize_t max_ext_len=-1):
|
||||||
cdef const char *cenc=NULL,
|
|
||||||
cdef const char *cerr=NULL
|
cdef const char *cerr=NULL
|
||||||
|
|
||||||
self.object_hook = object_hook
|
self.object_hook = object_hook
|
||||||
|
|
@ -340,12 +345,25 @@ cdef class Unpacker(object):
|
||||||
self.file_like_read = file_like.read
|
self.file_like_read = file_like.read
|
||||||
if not PyCallable_Check(self.file_like_read):
|
if not PyCallable_Check(self.file_like_read):
|
||||||
raise TypeError("`file_like.read` must be a callable.")
|
raise TypeError("`file_like.read` must be a callable.")
|
||||||
|
|
||||||
if not max_buffer_size:
|
if not max_buffer_size:
|
||||||
max_buffer_size = INT_MAX
|
max_buffer_size = INT_MAX
|
||||||
|
if max_str_len == -1:
|
||||||
|
max_str_len = max_buffer_size
|
||||||
|
if max_bin_len == -1:
|
||||||
|
max_bin_len = max_buffer_size
|
||||||
|
if max_array_len == -1:
|
||||||
|
max_array_len = max_buffer_size
|
||||||
|
if max_map_len == -1:
|
||||||
|
max_map_len = max_buffer_size//2
|
||||||
|
if max_ext_len == -1:
|
||||||
|
max_ext_len = max_buffer_size
|
||||||
|
|
||||||
if read_size > max_buffer_size:
|
if read_size > max_buffer_size:
|
||||||
raise ValueError("read_size should be less or equal to max_buffer_size")
|
raise ValueError("read_size should be less or equal to max_buffer_size")
|
||||||
if not read_size:
|
if not read_size:
|
||||||
read_size = min(max_buffer_size, 1024**2)
|
read_size = min(max_buffer_size, 1024**2)
|
||||||
|
|
||||||
self.max_buffer_size = max_buffer_size
|
self.max_buffer_size = max_buffer_size
|
||||||
self.read_size = read_size
|
self.read_size = read_size
|
||||||
self.buf = <char*>PyMem_Malloc(read_size)
|
self.buf = <char*>PyMem_Malloc(read_size)
|
||||||
|
|
@ -356,24 +374,19 @@ cdef class Unpacker(object):
|
||||||
self.buf_tail = 0
|
self.buf_tail = 0
|
||||||
self.stream_offset = 0
|
self.stream_offset = 0
|
||||||
|
|
||||||
if encoding is not None:
|
|
||||||
PyErr_WarnEx(PendingDeprecationWarning, "encoding is deprecated, Use raw=False instead.", 1)
|
|
||||||
self.encoding = encoding
|
|
||||||
cenc = encoding
|
|
||||||
|
|
||||||
if unicode_errors is not None:
|
if unicode_errors is not None:
|
||||||
self.unicode_errors = unicode_errors
|
self.unicode_errors = unicode_errors
|
||||||
cerr = unicode_errors
|
cerr = unicode_errors
|
||||||
|
|
||||||
init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook,
|
init_ctx(&self.ctx, object_hook, object_pairs_hook, list_hook,
|
||||||
ext_hook, use_list, raw, cenc, cerr,
|
ext_hook, use_list, raw, timestamp, strict_map_key, cerr,
|
||||||
max_str_len, max_bin_len, max_array_len,
|
max_str_len, max_bin_len, max_array_len,
|
||||||
max_map_len, max_ext_len)
|
max_map_len, max_ext_len)
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def feed(self, object next_bytes):
|
def feed(self, object next_bytes):
|
||||||
"""Append `next_bytes` to internal buffer."""
|
"""Append `next_bytes` to internal buffer."""
|
||||||
cdef Py_buffer pybuff
|
cdef Py_buffer pybuff
|
||||||
cdef int new_protocol = 0
|
|
||||||
cdef char* buf
|
cdef char* buf
|
||||||
cdef Py_ssize_t buf_len
|
cdef Py_ssize_t buf_len
|
||||||
|
|
||||||
|
|
@ -381,12 +394,11 @@ cdef class Unpacker(object):
|
||||||
raise AssertionError(
|
raise AssertionError(
|
||||||
"unpacker.feed() is not be able to use with `file_like`.")
|
"unpacker.feed() is not be able to use with `file_like`.")
|
||||||
|
|
||||||
get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len, &new_protocol)
|
get_data_from_buffer(next_bytes, &pybuff, &buf, &buf_len)
|
||||||
try:
|
try:
|
||||||
self.append_buffer(buf, buf_len)
|
self.append_buffer(buf, buf_len)
|
||||||
finally:
|
finally:
|
||||||
if new_protocol:
|
PyBuffer_Release(&pybuff)
|
||||||
PyBuffer_Release(&pybuff)
|
|
||||||
|
|
||||||
cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len):
|
cdef append_buffer(self, void* _buf, Py_ssize_t _buf_len):
|
||||||
cdef:
|
cdef:
|
||||||
|
|
@ -428,112 +440,111 @@ cdef class Unpacker(object):
|
||||||
self.buf_size = buf_size
|
self.buf_size = buf_size
|
||||||
self.buf_tail = tail + _buf_len
|
self.buf_tail = tail + _buf_len
|
||||||
|
|
||||||
cdef read_from_file(self):
|
cdef int read_from_file(self) except -1:
|
||||||
next_bytes = self.file_like_read(
|
cdef Py_ssize_t remains = self.max_buffer_size - (self.buf_tail - self.buf_head)
|
||||||
min(self.read_size,
|
if remains <= 0:
|
||||||
self.max_buffer_size - (self.buf_tail - self.buf_head)
|
raise BufferFull
|
||||||
))
|
|
||||||
|
next_bytes = self.file_like_read(min(self.read_size, remains))
|
||||||
if next_bytes:
|
if next_bytes:
|
||||||
self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes))
|
self.append_buffer(PyBytes_AsString(next_bytes), PyBytes_Size(next_bytes))
|
||||||
else:
|
else:
|
||||||
self.file_like = None
|
self.file_like = None
|
||||||
|
return 0
|
||||||
|
|
||||||
cdef object _unpack(self, execute_fn execute, object write_bytes, bint iter=0):
|
cdef object _unpack(self, execute_fn execute, bint iter=0):
|
||||||
cdef int ret
|
cdef int ret
|
||||||
cdef object obj
|
cdef object obj
|
||||||
cdef Py_ssize_t prev_head
|
cdef Py_ssize_t prev_head
|
||||||
|
|
||||||
if write_bytes is not None:
|
|
||||||
PyErr_WarnEx(DeprecationWarning, "`write_bytes` option is deprecated. Use `.tell()` instead.", 1)
|
|
||||||
|
|
||||||
if self.buf_head >= self.buf_tail and self.file_like is not None:
|
|
||||||
self.read_from_file()
|
|
||||||
|
|
||||||
while 1:
|
while 1:
|
||||||
prev_head = self.buf_head
|
prev_head = self.buf_head
|
||||||
if prev_head >= self.buf_tail:
|
if prev_head < self.buf_tail:
|
||||||
|
ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
|
||||||
|
self.stream_offset += self.buf_head - prev_head
|
||||||
|
else:
|
||||||
|
ret = 0
|
||||||
|
|
||||||
|
if ret == 1:
|
||||||
|
obj = unpack_data(&self.ctx)
|
||||||
|
unpack_init(&self.ctx)
|
||||||
|
return obj
|
||||||
|
elif ret == 0:
|
||||||
|
if self.file_like is not None:
|
||||||
|
self.read_from_file()
|
||||||
|
continue
|
||||||
if iter:
|
if iter:
|
||||||
raise StopIteration("No more data to unpack.")
|
raise StopIteration("No more data to unpack.")
|
||||||
else:
|
else:
|
||||||
raise OutOfData("No more data to unpack.")
|
raise OutOfData("No more data to unpack.")
|
||||||
|
elif ret == -2:
|
||||||
|
raise FormatError
|
||||||
|
elif ret == -3:
|
||||||
|
raise StackError
|
||||||
|
else:
|
||||||
|
raise ValueError("Unpack failed: error = %d" % (ret,))
|
||||||
|
|
||||||
try:
|
@cython.critical_section
|
||||||
ret = execute(&self.ctx, self.buf, self.buf_tail, &self.buf_head)
|
|
||||||
self.stream_offset += self.buf_head - prev_head
|
|
||||||
if write_bytes is not None:
|
|
||||||
write_bytes(PyBytes_FromStringAndSize(self.buf + prev_head, self.buf_head - prev_head))
|
|
||||||
|
|
||||||
if ret == 1:
|
|
||||||
obj = unpack_data(&self.ctx)
|
|
||||||
unpack_init(&self.ctx)
|
|
||||||
return obj
|
|
||||||
elif ret == 0:
|
|
||||||
if self.file_like is not None:
|
|
||||||
self.read_from_file()
|
|
||||||
continue
|
|
||||||
if iter:
|
|
||||||
raise StopIteration("No more data to unpack.")
|
|
||||||
else:
|
|
||||||
raise OutOfData("No more data to unpack.")
|
|
||||||
else:
|
|
||||||
raise UnpackValueError("Unpack failed: error = %d" % (ret,))
|
|
||||||
except ValueError as e:
|
|
||||||
raise UnpackValueError(e)
|
|
||||||
|
|
||||||
def read_bytes(self, Py_ssize_t nbytes):
|
def read_bytes(self, Py_ssize_t nbytes):
|
||||||
"""Read a specified number of raw bytes from the stream"""
|
"""Read a specified number of raw bytes from the stream"""
|
||||||
cdef Py_ssize_t nread
|
cdef Py_ssize_t nread
|
||||||
nread = min(self.buf_tail - self.buf_head, nbytes)
|
nread = min(self.buf_tail - self.buf_head, nbytes)
|
||||||
ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread)
|
ret = PyBytes_FromStringAndSize(self.buf + self.buf_head, nread)
|
||||||
self.buf_head += nread
|
self.buf_head += nread
|
||||||
if len(ret) < nbytes and self.file_like is not None:
|
if nread < nbytes and self.file_like is not None:
|
||||||
ret += self.file_like.read(nbytes - len(ret))
|
ret += self.file_like.read(nbytes - nread)
|
||||||
|
nread = len(ret)
|
||||||
|
self.stream_offset += nread
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def unpack(self, object write_bytes=None):
|
@cython.critical_section
|
||||||
|
def unpack(self):
|
||||||
"""Unpack one object
|
"""Unpack one object
|
||||||
|
|
||||||
If write_bytes is not None, it will be called with parts of the raw
|
|
||||||
message as it is unpacked.
|
|
||||||
|
|
||||||
Raises `OutOfData` when there are no more bytes to unpack.
|
Raises `OutOfData` when there are no more bytes to unpack.
|
||||||
"""
|
"""
|
||||||
return self._unpack(unpack_construct, write_bytes)
|
return self._unpack(unpack_construct)
|
||||||
|
|
||||||
def skip(self, object write_bytes=None):
|
@cython.critical_section
|
||||||
|
def skip(self):
|
||||||
"""Read and ignore one object, returning None
|
"""Read and ignore one object, returning None
|
||||||
|
|
||||||
If write_bytes is not None, it will be called with parts of the raw
|
|
||||||
message as it is unpacked.
|
|
||||||
|
|
||||||
Raises `OutOfData` when there are no more bytes to unpack.
|
Raises `OutOfData` when there are no more bytes to unpack.
|
||||||
"""
|
"""
|
||||||
return self._unpack(unpack_skip, write_bytes)
|
return self._unpack(unpack_skip)
|
||||||
|
|
||||||
def read_array_header(self, object write_bytes=None):
|
@cython.critical_section
|
||||||
|
def read_array_header(self):
|
||||||
"""assuming the next object is an array, return its size n, such that
|
"""assuming the next object is an array, return its size n, such that
|
||||||
the next n unpack() calls will iterate over its contents.
|
the next n unpack() calls will iterate over its contents.
|
||||||
|
|
||||||
Raises `OutOfData` when there are no more bytes to unpack.
|
Raises `OutOfData` when there are no more bytes to unpack.
|
||||||
"""
|
"""
|
||||||
return self._unpack(read_array_header, write_bytes)
|
return self._unpack(read_array_header)
|
||||||
|
|
||||||
def read_map_header(self, object write_bytes=None):
|
@cython.critical_section
|
||||||
|
def read_map_header(self):
|
||||||
"""assuming the next object is a map, return its size n, such that the
|
"""assuming the next object is a map, return its size n, such that the
|
||||||
next n * 2 unpack() calls will iterate over its key-value pairs.
|
next n * 2 unpack() calls will iterate over its key-value pairs.
|
||||||
|
|
||||||
Raises `OutOfData` when there are no more bytes to unpack.
|
Raises `OutOfData` when there are no more bytes to unpack.
|
||||||
"""
|
"""
|
||||||
return self._unpack(read_map_header, write_bytes)
|
return self._unpack(read_map_header)
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def tell(self):
|
def tell(self):
|
||||||
|
"""Returns the current position of the Unpacker in bytes, i.e., the
|
||||||
|
number of bytes that were read from the input, also the starting
|
||||||
|
position of the next object.
|
||||||
|
"""
|
||||||
return self.stream_offset
|
return self.stream_offset
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
|
@cython.critical_section
|
||||||
def __next__(self):
|
def __next__(self):
|
||||||
return self._unpack(unpack_construct, None, 1)
|
return self._unpack(unpack_construct, 1)
|
||||||
|
|
||||||
# for debug.
|
# for debug.
|
||||||
#def _buf(self):
|
#def _buf(self):
|
||||||
|
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
version = (0, 5, 6)
|
|
||||||
|
|
@ -1,5 +1,10 @@
|
||||||
class UnpackException(Exception):
|
class UnpackException(Exception):
|
||||||
"""Deprecated. Use Exception instead to catch all exception during unpacking."""
|
"""Base class for some exceptions raised while unpacking.
|
||||||
|
|
||||||
|
NOTE: unpack may raise exception other than subclass of
|
||||||
|
UnpackException. If you want to catch all error, catch
|
||||||
|
Exception instead.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
class BufferFull(UnpackException):
|
class BufferFull(UnpackException):
|
||||||
|
|
@ -10,11 +15,25 @@ class OutOfData(UnpackException):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class UnpackValueError(UnpackException, ValueError):
|
class FormatError(ValueError, UnpackException):
|
||||||
"""Deprecated. Use ValueError instead."""
|
"""Invalid msgpack format"""
|
||||||
|
|
||||||
|
|
||||||
|
class StackError(ValueError, UnpackException):
|
||||||
|
"""Too nested"""
|
||||||
|
|
||||||
|
|
||||||
|
# Deprecated. Use ValueError instead
|
||||||
|
UnpackValueError = ValueError
|
||||||
|
|
||||||
|
|
||||||
class ExtraData(UnpackValueError):
|
class ExtraData(UnpackValueError):
|
||||||
|
"""ExtraData is raised when there is trailing data.
|
||||||
|
|
||||||
|
This exception is raised while only one-shot (not streaming)
|
||||||
|
unpack.
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, unpacked, extra):
|
def __init__(self, unpacked, extra):
|
||||||
self.unpacked = unpacked
|
self.unpacked = unpacked
|
||||||
self.extra = extra
|
self.extra = extra
|
||||||
|
|
@ -23,19 +42,7 @@ class ExtraData(UnpackValueError):
|
||||||
return "unpack(b) received extra data."
|
return "unpack(b) received extra data."
|
||||||
|
|
||||||
|
|
||||||
class PackException(Exception):
|
# Deprecated. Use Exception instead to catch all exception during packing.
|
||||||
"""Deprecated. Use Exception instead to catch all exception during packing."""
|
PackException = Exception
|
||||||
|
PackValueError = ValueError
|
||||||
|
PackOverflowError = OverflowError
|
||||||
class PackValueError(PackException, ValueError):
|
|
||||||
"""PackValueError is raised when type of input data is supported but it's value is unsupported.
|
|
||||||
|
|
||||||
Deprecated. Use ValueError instead.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class PackOverflowError(PackValueError, OverflowError):
|
|
||||||
"""PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32).
|
|
||||||
|
|
||||||
Deprecated. Use ValueError instead.
|
|
||||||
"""
|
|
||||||
|
|
|
||||||
170
msgpack/ext.py
Normal file
170
msgpack/ext.py
Normal file
|
|
@ -0,0 +1,170 @@
|
||||||
|
import datetime
|
||||||
|
import struct
|
||||||
|
from collections import namedtuple
|
||||||
|
|
||||||
|
|
||||||
|
class ExtType(namedtuple("ExtType", "code data")):
|
||||||
|
"""ExtType represents ext type in msgpack."""
|
||||||
|
|
||||||
|
def __new__(cls, code, data):
|
||||||
|
if not isinstance(code, int):
|
||||||
|
raise TypeError("code must be int")
|
||||||
|
if not isinstance(data, bytes):
|
||||||
|
raise TypeError("data must be bytes")
|
||||||
|
if not 0 <= code <= 127:
|
||||||
|
raise ValueError("code must be 0~127")
|
||||||
|
return super().__new__(cls, code, data)
|
||||||
|
|
||||||
|
|
||||||
|
class Timestamp:
|
||||||
|
"""Timestamp represents the Timestamp extension type in msgpack.
|
||||||
|
|
||||||
|
When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`.
|
||||||
|
When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and
|
||||||
|
unpack `Timestamp`.
|
||||||
|
|
||||||
|
This class is immutable: Do not override seconds and nanoseconds.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = ["seconds", "nanoseconds"]
|
||||||
|
|
||||||
|
def __init__(self, seconds, nanoseconds=0):
|
||||||
|
"""Initialize a Timestamp object.
|
||||||
|
|
||||||
|
:param int seconds:
|
||||||
|
Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
|
||||||
|
May be negative.
|
||||||
|
|
||||||
|
:param int nanoseconds:
|
||||||
|
Number of nanoseconds to add to `seconds` to get fractional time.
|
||||||
|
Maximum is 999_999_999. Default is 0.
|
||||||
|
|
||||||
|
Note: Negative times (before the UNIX epoch) are represented as neg. seconds + pos. ns.
|
||||||
|
"""
|
||||||
|
if not isinstance(seconds, int):
|
||||||
|
raise TypeError("seconds must be an integer")
|
||||||
|
if not isinstance(nanoseconds, int):
|
||||||
|
raise TypeError("nanoseconds must be an integer")
|
||||||
|
if not (0 <= nanoseconds < 10**9):
|
||||||
|
raise ValueError("nanoseconds must be a non-negative integer less than 999999999.")
|
||||||
|
self.seconds = seconds
|
||||||
|
self.nanoseconds = nanoseconds
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""String representation of Timestamp."""
|
||||||
|
return f"Timestamp(seconds={self.seconds}, nanoseconds={self.nanoseconds})"
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
"""Check for equality with another Timestamp object"""
|
||||||
|
if type(other) is self.__class__:
|
||||||
|
return self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
"""not-equals method (see :func:`__eq__()`)"""
|
||||||
|
return not self.__eq__(other)
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash((self.seconds, self.nanoseconds))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_bytes(b):
|
||||||
|
"""Unpack bytes into a `Timestamp` object.
|
||||||
|
|
||||||
|
Used for pure-Python msgpack unpacking.
|
||||||
|
|
||||||
|
:param b: Payload from msgpack ext message with code -1
|
||||||
|
:type b: bytes
|
||||||
|
|
||||||
|
:returns: Timestamp object unpacked from msgpack ext payload
|
||||||
|
:rtype: Timestamp
|
||||||
|
"""
|
||||||
|
if len(b) == 4:
|
||||||
|
seconds = struct.unpack("!L", b)[0]
|
||||||
|
nanoseconds = 0
|
||||||
|
elif len(b) == 8:
|
||||||
|
data64 = struct.unpack("!Q", b)[0]
|
||||||
|
seconds = data64 & 0x00000003FFFFFFFF
|
||||||
|
nanoseconds = data64 >> 34
|
||||||
|
elif len(b) == 12:
|
||||||
|
nanoseconds, seconds = struct.unpack("!Iq", b)
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
"Timestamp type can only be created from 32, 64, or 96-bit byte objects"
|
||||||
|
)
|
||||||
|
return Timestamp(seconds, nanoseconds)
|
||||||
|
|
||||||
|
def to_bytes(self):
|
||||||
|
"""Pack this Timestamp object into bytes.
|
||||||
|
|
||||||
|
Used for pure-Python msgpack packing.
|
||||||
|
|
||||||
|
:returns data: Payload for EXT message with code -1 (timestamp type)
|
||||||
|
:rtype: bytes
|
||||||
|
"""
|
||||||
|
if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits
|
||||||
|
data64 = self.nanoseconds << 34 | self.seconds
|
||||||
|
if data64 & 0xFFFFFFFF00000000 == 0:
|
||||||
|
# nanoseconds is zero and seconds < 2**32, so timestamp 32
|
||||||
|
data = struct.pack("!L", data64)
|
||||||
|
else:
|
||||||
|
# timestamp 64
|
||||||
|
data = struct.pack("!Q", data64)
|
||||||
|
else:
|
||||||
|
# timestamp 96
|
||||||
|
data = struct.pack("!Iq", self.nanoseconds, self.seconds)
|
||||||
|
return data
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_unix(unix_sec):
|
||||||
|
"""Create a Timestamp from posix timestamp in seconds.
|
||||||
|
|
||||||
|
:param unix_float: Posix timestamp in seconds.
|
||||||
|
:type unix_float: int or float
|
||||||
|
"""
|
||||||
|
seconds = int(unix_sec // 1)
|
||||||
|
nanoseconds = int((unix_sec % 1) * 10**9)
|
||||||
|
return Timestamp(seconds, nanoseconds)
|
||||||
|
|
||||||
|
def to_unix(self):
|
||||||
|
"""Get the timestamp as a floating-point value.
|
||||||
|
|
||||||
|
:returns: posix timestamp
|
||||||
|
:rtype: float
|
||||||
|
"""
|
||||||
|
return self.seconds + self.nanoseconds / 1e9
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_unix_nano(unix_ns):
|
||||||
|
"""Create a Timestamp from posix timestamp in nanoseconds.
|
||||||
|
|
||||||
|
:param int unix_ns: Posix timestamp in nanoseconds.
|
||||||
|
:rtype: Timestamp
|
||||||
|
"""
|
||||||
|
return Timestamp(*divmod(unix_ns, 10**9))
|
||||||
|
|
||||||
|
def to_unix_nano(self):
|
||||||
|
"""Get the timestamp as a unixtime in nanoseconds.
|
||||||
|
|
||||||
|
:returns: posix timestamp in nanoseconds
|
||||||
|
:rtype: int
|
||||||
|
"""
|
||||||
|
return self.seconds * 10**9 + self.nanoseconds
|
||||||
|
|
||||||
|
def to_datetime(self):
|
||||||
|
"""Get the timestamp as a UTC datetime.
|
||||||
|
|
||||||
|
:rtype: `datetime.datetime`
|
||||||
|
"""
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
return datetime.datetime.fromtimestamp(0, utc) + datetime.timedelta(
|
||||||
|
seconds=self.seconds, microseconds=self.nanoseconds // 1000
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_datetime(dt):
|
||||||
|
"""Create a Timestamp from datetime with tzinfo.
|
||||||
|
|
||||||
|
:rtype: Timestamp
|
||||||
|
"""
|
||||||
|
return Timestamp(seconds=int(dt.timestamp()), nanoseconds=dt.microsecond * 1000)
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -21,15 +21,12 @@
|
||||||
#include "sysdep.h"
|
#include "sysdep.h"
|
||||||
#include <limits.h>
|
#include <limits.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
#include <stdbool.h>
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
extern "C" {
|
extern "C" {
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef _MSC_VER
|
|
||||||
#define inline __inline
|
|
||||||
#endif
|
|
||||||
|
|
||||||
typedef struct msgpack_packer {
|
typedef struct msgpack_packer {
|
||||||
char *buf;
|
char *buf;
|
||||||
size_t length;
|
size_t length;
|
||||||
|
|
@ -67,53 +64,6 @@ static inline int msgpack_pack_write(msgpack_packer* pk, const char *data, size_
|
||||||
|
|
||||||
#include "pack_template.h"
|
#include "pack_template.h"
|
||||||
|
|
||||||
// return -2 when o is too long
|
|
||||||
static inline int
|
|
||||||
msgpack_pack_unicode(msgpack_packer *pk, PyObject *o, long long limit)
|
|
||||||
{
|
|
||||||
#if PY_MAJOR_VERSION >= 3
|
|
||||||
assert(PyUnicode_Check(o));
|
|
||||||
|
|
||||||
Py_ssize_t len;
|
|
||||||
const char* buf = PyUnicode_AsUTF8AndSize(o, &len);
|
|
||||||
if (buf == NULL)
|
|
||||||
return -1;
|
|
||||||
|
|
||||||
if (len > limit) {
|
|
||||||
return -2;
|
|
||||||
}
|
|
||||||
|
|
||||||
int ret = msgpack_pack_raw(pk, len);
|
|
||||||
if (ret) return ret;
|
|
||||||
|
|
||||||
return msgpack_pack_raw_body(pk, buf, len);
|
|
||||||
#else
|
|
||||||
PyObject *bytes;
|
|
||||||
Py_ssize_t len;
|
|
||||||
int ret;
|
|
||||||
|
|
||||||
// py2
|
|
||||||
bytes = PyUnicode_AsUTF8String(o);
|
|
||||||
if (bytes == NULL)
|
|
||||||
return -1;
|
|
||||||
|
|
||||||
len = PyString_GET_SIZE(bytes);
|
|
||||||
if (len > limit) {
|
|
||||||
Py_DECREF(bytes);
|
|
||||||
return -2;
|
|
||||||
}
|
|
||||||
|
|
||||||
ret = msgpack_pack_raw(pk, len);
|
|
||||||
if (ret) {
|
|
||||||
Py_DECREF(bytes);
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
ret = msgpack_pack_raw_body(pk, PyString_AS_STRING(bytes), len);
|
|
||||||
Py_DECREF(bytes);
|
|
||||||
return ret;
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
|
||||||
|
|
@ -37,18 +37,6 @@
|
||||||
* Integer
|
* Integer
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#define msgpack_pack_real_uint8(x, d) \
|
|
||||||
do { \
|
|
||||||
if(d < (1<<7)) { \
|
|
||||||
/* fixnum */ \
|
|
||||||
msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); \
|
|
||||||
} else { \
|
|
||||||
/* unsigned 8 */ \
|
|
||||||
unsigned char buf[2] = {0xcc, TAKE8_8(d)}; \
|
|
||||||
msgpack_pack_append_buffer(x, buf, 2); \
|
|
||||||
} \
|
|
||||||
} while(0)
|
|
||||||
|
|
||||||
#define msgpack_pack_real_uint16(x, d) \
|
#define msgpack_pack_real_uint16(x, d) \
|
||||||
do { \
|
do { \
|
||||||
if(d < (1<<7)) { \
|
if(d < (1<<7)) { \
|
||||||
|
|
@ -123,18 +111,6 @@ do { \
|
||||||
} \
|
} \
|
||||||
} while(0)
|
} while(0)
|
||||||
|
|
||||||
#define msgpack_pack_real_int8(x, d) \
|
|
||||||
do { \
|
|
||||||
if(d < -(1<<5)) { \
|
|
||||||
/* signed 8 */ \
|
|
||||||
unsigned char buf[2] = {0xd0, TAKE8_8(d)}; \
|
|
||||||
msgpack_pack_append_buffer(x, buf, 2); \
|
|
||||||
} else { \
|
|
||||||
/* fixnum */ \
|
|
||||||
msgpack_pack_append_buffer(x, &TAKE8_8(d), 1); \
|
|
||||||
} \
|
|
||||||
} while(0)
|
|
||||||
|
|
||||||
#define msgpack_pack_real_int16(x, d) \
|
#define msgpack_pack_real_int16(x, d) \
|
||||||
do { \
|
do { \
|
||||||
if(d < -(1<<5)) { \
|
if(d < -(1<<5)) { \
|
||||||
|
|
@ -264,49 +240,6 @@ do { \
|
||||||
} while(0)
|
} while(0)
|
||||||
|
|
||||||
|
|
||||||
static inline int msgpack_pack_uint8(msgpack_packer* x, uint8_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_uint8(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_uint16(msgpack_packer* x, uint16_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_uint32(msgpack_packer* x, uint32_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_uint64(msgpack_packer* x, uint64_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_int8(msgpack_packer* x, int8_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_int8(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_int16(msgpack_packer* x, int16_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_int32(msgpack_packer* x, int32_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_int32(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_int64(msgpack_packer* x, int64_t d)
|
|
||||||
{
|
|
||||||
msgpack_pack_real_int64(x, d);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
//#ifdef msgpack_pack_inline_func_cint
|
|
||||||
|
|
||||||
static inline int msgpack_pack_short(msgpack_packer* x, short d)
|
static inline int msgpack_pack_short(msgpack_packer* x, short d)
|
||||||
{
|
{
|
||||||
#if defined(SIZEOF_SHORT)
|
#if defined(SIZEOF_SHORT)
|
||||||
|
|
@ -372,192 +305,37 @@ if(sizeof(int) == 2) {
|
||||||
static inline int msgpack_pack_long(msgpack_packer* x, long d)
|
static inline int msgpack_pack_long(msgpack_packer* x, long d)
|
||||||
{
|
{
|
||||||
#if defined(SIZEOF_LONG)
|
#if defined(SIZEOF_LONG)
|
||||||
#if SIZEOF_LONG == 2
|
#if SIZEOF_LONG == 4
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
#elif SIZEOF_LONG == 4
|
|
||||||
msgpack_pack_real_int32(x, d);
|
msgpack_pack_real_int32(x, d);
|
||||||
#else
|
#else
|
||||||
msgpack_pack_real_int64(x, d);
|
msgpack_pack_real_int64(x, d);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#elif defined(LONG_MAX)
|
#elif defined(LONG_MAX)
|
||||||
#if LONG_MAX == 0x7fffL
|
#if LONG_MAX == 0x7fffffffL
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
#elif LONG_MAX == 0x7fffffffL
|
|
||||||
msgpack_pack_real_int32(x, d);
|
msgpack_pack_real_int32(x, d);
|
||||||
#else
|
#else
|
||||||
msgpack_pack_real_int64(x, d);
|
msgpack_pack_real_int64(x, d);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#else
|
#else
|
||||||
if(sizeof(long) == 2) {
|
if (sizeof(long) == 4) {
|
||||||
msgpack_pack_real_int16(x, d);
|
msgpack_pack_real_int32(x, d);
|
||||||
} else if(sizeof(long) == 4) {
|
} else {
|
||||||
msgpack_pack_real_int32(x, d);
|
msgpack_pack_real_int64(x, d);
|
||||||
} else {
|
}
|
||||||
msgpack_pack_real_int64(x, d);
|
|
||||||
}
|
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline int msgpack_pack_long_long(msgpack_packer* x, long long d)
|
static inline int msgpack_pack_long_long(msgpack_packer* x, long long d)
|
||||||
{
|
{
|
||||||
#if defined(SIZEOF_LONG_LONG)
|
|
||||||
#if SIZEOF_LONG_LONG == 2
|
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
#elif SIZEOF_LONG_LONG == 4
|
|
||||||
msgpack_pack_real_int32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_int64(x, d);
|
msgpack_pack_real_int64(x, d);
|
||||||
#endif
|
|
||||||
|
|
||||||
#elif defined(LLONG_MAX)
|
|
||||||
#if LLONG_MAX == 0x7fffL
|
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
#elif LLONG_MAX == 0x7fffffffL
|
|
||||||
msgpack_pack_real_int32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_int64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#else
|
|
||||||
if(sizeof(long long) == 2) {
|
|
||||||
msgpack_pack_real_int16(x, d);
|
|
||||||
} else if(sizeof(long long) == 4) {
|
|
||||||
msgpack_pack_real_int32(x, d);
|
|
||||||
} else {
|
|
||||||
msgpack_pack_real_int64(x, d);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_unsigned_short(msgpack_packer* x, unsigned short d)
|
|
||||||
{
|
|
||||||
#if defined(SIZEOF_SHORT)
|
|
||||||
#if SIZEOF_SHORT == 2
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif SIZEOF_SHORT == 4
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#elif defined(USHRT_MAX)
|
|
||||||
#if USHRT_MAX == 0xffffU
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif USHRT_MAX == 0xffffffffU
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#else
|
|
||||||
if(sizeof(unsigned short) == 2) {
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
} else if(sizeof(unsigned short) == 4) {
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
} else {
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_unsigned_int(msgpack_packer* x, unsigned int d)
|
|
||||||
{
|
|
||||||
#if defined(SIZEOF_INT)
|
|
||||||
#if SIZEOF_INT == 2
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif SIZEOF_INT == 4
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#elif defined(UINT_MAX)
|
|
||||||
#if UINT_MAX == 0xffffU
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif UINT_MAX == 0xffffffffU
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#else
|
|
||||||
if(sizeof(unsigned int) == 2) {
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
} else if(sizeof(unsigned int) == 4) {
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
} else {
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline int msgpack_pack_unsigned_long(msgpack_packer* x, unsigned long d)
|
|
||||||
{
|
|
||||||
#if defined(SIZEOF_LONG)
|
|
||||||
#if SIZEOF_LONG == 2
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif SIZEOF_LONG == 4
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#elif defined(ULONG_MAX)
|
|
||||||
#if ULONG_MAX == 0xffffUL
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif ULONG_MAX == 0xffffffffUL
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#else
|
|
||||||
if(sizeof(unsigned long) == 2) {
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
} else if(sizeof(unsigned long) == 4) {
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
} else {
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline int msgpack_pack_unsigned_long_long(msgpack_packer* x, unsigned long long d)
|
static inline int msgpack_pack_unsigned_long_long(msgpack_packer* x, unsigned long long d)
|
||||||
{
|
{
|
||||||
#if defined(SIZEOF_LONG_LONG)
|
|
||||||
#if SIZEOF_LONG_LONG == 2
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif SIZEOF_LONG_LONG == 4
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#elif defined(ULLONG_MAX)
|
|
||||||
#if ULLONG_MAX == 0xffffUL
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
#elif ULLONG_MAX == 0xffffffffUL
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
#else
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#else
|
|
||||||
if(sizeof(unsigned long long) == 2) {
|
|
||||||
msgpack_pack_real_uint16(x, d);
|
|
||||||
} else if(sizeof(unsigned long long) == 4) {
|
|
||||||
msgpack_pack_real_uint32(x, d);
|
|
||||||
} else {
|
|
||||||
msgpack_pack_real_uint64(x, d);
|
msgpack_pack_real_uint64(x, d);
|
||||||
}
|
}
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
//#undef msgpack_pack_inline_func_cint
|
|
||||||
//#endif
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
|
@ -566,24 +344,26 @@ if(sizeof(unsigned long long) == 2) {
|
||||||
|
|
||||||
static inline int msgpack_pack_float(msgpack_packer* x, float d)
|
static inline int msgpack_pack_float(msgpack_packer* x, float d)
|
||||||
{
|
{
|
||||||
union { float f; uint32_t i; } mem;
|
|
||||||
mem.f = d;
|
|
||||||
unsigned char buf[5];
|
unsigned char buf[5];
|
||||||
buf[0] = 0xca; _msgpack_store32(&buf[1], mem.i);
|
buf[0] = 0xca;
|
||||||
|
|
||||||
|
#if PY_VERSION_HEX >= 0x030B00A7
|
||||||
|
PyFloat_Pack4(d, (char *)&buf[1], 0);
|
||||||
|
#else
|
||||||
|
_PyFloat_Pack4(d, &buf[1], 0);
|
||||||
|
#endif
|
||||||
msgpack_pack_append_buffer(x, buf, 5);
|
msgpack_pack_append_buffer(x, buf, 5);
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline int msgpack_pack_double(msgpack_packer* x, double d)
|
static inline int msgpack_pack_double(msgpack_packer* x, double d)
|
||||||
{
|
{
|
||||||
union { double f; uint64_t i; } mem;
|
|
||||||
mem.f = d;
|
|
||||||
unsigned char buf[9];
|
unsigned char buf[9];
|
||||||
buf[0] = 0xcb;
|
buf[0] = 0xcb;
|
||||||
#if defined(__arm__) && !(__ARM_EABI__) // arm-oabi
|
#if PY_VERSION_HEX >= 0x030B00A7
|
||||||
// https://github.com/msgpack/msgpack-perl/pull/1
|
PyFloat_Pack8(d, (char *)&buf[1], 0);
|
||||||
mem.i = (mem.i & 0xFFFFFFFFUL) << 32UL | (mem.i >> 32UL);
|
#else
|
||||||
|
_PyFloat_Pack8(d, &buf[1], 0);
|
||||||
#endif
|
#endif
|
||||||
_msgpack_store64(&buf[1], mem.i);
|
|
||||||
msgpack_pack_append_buffer(x, buf, 9);
|
msgpack_pack_append_buffer(x, buf, 9);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -766,6 +546,39 @@ static inline int msgpack_pack_ext(msgpack_packer* x, char typecode, size_t l)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Pack Timestamp extension type. Follows msgpack-c pack_template.h.
|
||||||
|
*/
|
||||||
|
static inline int msgpack_pack_timestamp(msgpack_packer* x, int64_t seconds, uint32_t nanoseconds)
|
||||||
|
{
|
||||||
|
if ((seconds >> 34) == 0) {
|
||||||
|
/* seconds is unsigned and fits in 34 bits */
|
||||||
|
uint64_t data64 = ((uint64_t)nanoseconds << 34) | (uint64_t)seconds;
|
||||||
|
if ((data64 & 0xffffffff00000000L) == 0) {
|
||||||
|
/* no nanoseconds and seconds is 32bits or smaller. timestamp32. */
|
||||||
|
unsigned char buf[4];
|
||||||
|
uint32_t data32 = (uint32_t)data64;
|
||||||
|
msgpack_pack_ext(x, -1, 4);
|
||||||
|
_msgpack_store32(buf, data32);
|
||||||
|
msgpack_pack_raw_body(x, buf, 4);
|
||||||
|
} else {
|
||||||
|
/* timestamp64 */
|
||||||
|
unsigned char buf[8];
|
||||||
|
msgpack_pack_ext(x, -1, 8);
|
||||||
|
_msgpack_store64(buf, data64);
|
||||||
|
msgpack_pack_raw_body(x, buf, 8);
|
||||||
|
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
/* seconds is signed or >34bits */
|
||||||
|
unsigned char buf[12];
|
||||||
|
_msgpack_store32(&buf[0], nanoseconds);
|
||||||
|
_msgpack_store64(&buf[4], seconds);
|
||||||
|
msgpack_pack_ext(x, -1, 12);
|
||||||
|
msgpack_pack_raw_body(x, buf, 12);
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
#undef msgpack_pack_append_buffer
|
#undef msgpack_pack_append_buffer
|
||||||
|
|
@ -775,11 +588,9 @@ static inline int msgpack_pack_ext(msgpack_packer* x, char typecode, size_t l)
|
||||||
#undef TAKE8_32
|
#undef TAKE8_32
|
||||||
#undef TAKE8_64
|
#undef TAKE8_64
|
||||||
|
|
||||||
#undef msgpack_pack_real_uint8
|
|
||||||
#undef msgpack_pack_real_uint16
|
#undef msgpack_pack_real_uint16
|
||||||
#undef msgpack_pack_real_uint32
|
#undef msgpack_pack_real_uint32
|
||||||
#undef msgpack_pack_real_uint64
|
#undef msgpack_pack_real_uint64
|
||||||
#undef msgpack_pack_real_int8
|
|
||||||
#undef msgpack_pack_real_int16
|
#undef msgpack_pack_real_int16
|
||||||
#undef msgpack_pack_real_int32
|
#undef msgpack_pack_real_int32
|
||||||
#undef msgpack_pack_real_int64
|
#undef msgpack_pack_real_int64
|
||||||
|
|
|
||||||
|
|
@ -61,14 +61,14 @@ typedef unsigned int _msgpack_atomic_counter_t;
|
||||||
#endif
|
#endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#else
|
#else /* _WIN32 */
|
||||||
#include <arpa/inet.h> /* __BYTE_ORDER */
|
#include <arpa/inet.h> /* ntohs, ntohl */
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if !defined(__LITTLE_ENDIAN__) && !defined(__BIG_ENDIAN__)
|
#if !defined(__LITTLE_ENDIAN__) && !defined(__BIG_ENDIAN__)
|
||||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
|
||||||
#define __LITTLE_ENDIAN__
|
#define __LITTLE_ENDIAN__
|
||||||
#elif __BYTE_ORDER == __BIG_ENDIAN
|
#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
|
||||||
#define __BIG_ENDIAN__
|
#define __BIG_ENDIAN__
|
||||||
#elif _WIN32
|
#elif _WIN32
|
||||||
#define __LITTLE_ENDIAN__
|
#define __LITTLE_ENDIAN__
|
||||||
|
|
@ -95,7 +95,7 @@ typedef unsigned int _msgpack_atomic_counter_t;
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
# if defined(ntohl)
|
# if defined(ntohl)
|
||||||
# define _msgpack_be32(x) ntohl(x)
|
# define _msgpack_be32(x) ntohl(x)
|
||||||
# elif defined(_byteswap_ulong) || (defined(_MSC_VER) && _MSC_VER >= 1400)
|
# elif defined(_byteswap_ulong) || defined(_MSC_VER)
|
||||||
# define _msgpack_be32(x) ((uint32_t)_byteswap_ulong((unsigned long)x))
|
# define _msgpack_be32(x) ((uint32_t)_byteswap_ulong((unsigned long)x))
|
||||||
# else
|
# else
|
||||||
# define _msgpack_be32(x) \
|
# define _msgpack_be32(x) \
|
||||||
|
|
@ -108,7 +108,7 @@ typedef unsigned int _msgpack_atomic_counter_t;
|
||||||
# define _msgpack_be32(x) ntohl(x)
|
# define _msgpack_be32(x) ntohl(x)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if defined(_byteswap_uint64) || (defined(_MSC_VER) && _MSC_VER >= 1400)
|
#if defined(_byteswap_uint64) || defined(_MSC_VER)
|
||||||
# define _msgpack_be64(x) (_byteswap_uint64(x))
|
# define _msgpack_be64(x) (_byteswap_uint64(x))
|
||||||
#elif defined(bswap_64)
|
#elif defined(bswap_64)
|
||||||
# define _msgpack_be64(x) bswap_64(x)
|
# define _msgpack_be64(x) bswap_64(x)
|
||||||
|
|
|
||||||
143
msgpack/unpack.h
143
msgpack/unpack.h
|
|
@ -23,10 +23,14 @@ typedef struct unpack_user {
|
||||||
bool use_list;
|
bool use_list;
|
||||||
bool raw;
|
bool raw;
|
||||||
bool has_pairs_hook;
|
bool has_pairs_hook;
|
||||||
|
bool strict_map_key;
|
||||||
|
int timestamp;
|
||||||
PyObject *object_hook;
|
PyObject *object_hook;
|
||||||
PyObject *list_hook;
|
PyObject *list_hook;
|
||||||
PyObject *ext_hook;
|
PyObject *ext_hook;
|
||||||
const char *encoding;
|
PyObject *timestamp_t;
|
||||||
|
PyObject *giga;
|
||||||
|
PyObject *utc;
|
||||||
const char *unicode_errors;
|
const char *unicode_errors;
|
||||||
Py_ssize_t max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len;
|
Py_ssize_t max_str_len, max_bin_len, max_array_len, max_map_len, max_ext_len;
|
||||||
} unpack_user;
|
} unpack_user;
|
||||||
|
|
@ -43,7 +47,7 @@ static inline msgpack_unpack_object unpack_callback_root(unpack_user* u)
|
||||||
|
|
||||||
static inline int unpack_callback_uint16(unpack_user* u, uint16_t d, msgpack_unpack_object* o)
|
static inline int unpack_callback_uint16(unpack_user* u, uint16_t d, msgpack_unpack_object* o)
|
||||||
{
|
{
|
||||||
PyObject *p = PyInt_FromLong((long)d);
|
PyObject *p = PyLong_FromLong((long)d);
|
||||||
if (!p)
|
if (!p)
|
||||||
return -1;
|
return -1;
|
||||||
*o = p;
|
*o = p;
|
||||||
|
|
@ -57,7 +61,7 @@ static inline int unpack_callback_uint8(unpack_user* u, uint8_t d, msgpack_unpac
|
||||||
|
|
||||||
static inline int unpack_callback_uint32(unpack_user* u, uint32_t d, msgpack_unpack_object* o)
|
static inline int unpack_callback_uint32(unpack_user* u, uint32_t d, msgpack_unpack_object* o)
|
||||||
{
|
{
|
||||||
PyObject *p = PyInt_FromSize_t((size_t)d);
|
PyObject *p = PyLong_FromSize_t((size_t)d);
|
||||||
if (!p)
|
if (!p)
|
||||||
return -1;
|
return -1;
|
||||||
*o = p;
|
*o = p;
|
||||||
|
|
@ -70,7 +74,7 @@ static inline int unpack_callback_uint64(unpack_user* u, uint64_t d, msgpack_unp
|
||||||
if (d > LONG_MAX) {
|
if (d > LONG_MAX) {
|
||||||
p = PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)d);
|
p = PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)d);
|
||||||
} else {
|
} else {
|
||||||
p = PyInt_FromLong((long)d);
|
p = PyLong_FromLong((long)d);
|
||||||
}
|
}
|
||||||
if (!p)
|
if (!p)
|
||||||
return -1;
|
return -1;
|
||||||
|
|
@ -80,7 +84,7 @@ static inline int unpack_callback_uint64(unpack_user* u, uint64_t d, msgpack_unp
|
||||||
|
|
||||||
static inline int unpack_callback_int32(unpack_user* u, int32_t d, msgpack_unpack_object* o)
|
static inline int unpack_callback_int32(unpack_user* u, int32_t d, msgpack_unpack_object* o)
|
||||||
{
|
{
|
||||||
PyObject *p = PyInt_FromLong(d);
|
PyObject *p = PyLong_FromLong(d);
|
||||||
if (!p)
|
if (!p)
|
||||||
return -1;
|
return -1;
|
||||||
*o = p;
|
*o = p;
|
||||||
|
|
@ -103,7 +107,7 @@ static inline int unpack_callback_int64(unpack_user* u, int64_t d, msgpack_unpac
|
||||||
if (d > LONG_MAX || d < LONG_MIN) {
|
if (d > LONG_MAX || d < LONG_MIN) {
|
||||||
p = PyLong_FromLongLong((PY_LONG_LONG)d);
|
p = PyLong_FromLongLong((PY_LONG_LONG)d);
|
||||||
} else {
|
} else {
|
||||||
p = PyInt_FromLong((long)d);
|
p = PyLong_FromLong((long)d);
|
||||||
}
|
}
|
||||||
*o = p;
|
*o = p;
|
||||||
return 0;
|
return 0;
|
||||||
|
|
@ -188,6 +192,13 @@ static inline int unpack_callback_map(unpack_user* u, unsigned int n, msgpack_un
|
||||||
|
|
||||||
static inline int unpack_callback_map_item(unpack_user* u, unsigned int current, msgpack_unpack_object* c, msgpack_unpack_object k, msgpack_unpack_object v)
|
static inline int unpack_callback_map_item(unpack_user* u, unsigned int current, msgpack_unpack_object* c, msgpack_unpack_object k, msgpack_unpack_object v)
|
||||||
{
|
{
|
||||||
|
if (u->strict_map_key && !PyUnicode_CheckExact(k) && !PyBytes_CheckExact(k)) {
|
||||||
|
PyErr_Format(PyExc_ValueError, "%.100s is not allowed for map key when strict_map_key=True", Py_TYPE(k)->tp_name);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
if (PyUnicode_CheckExact(k)) {
|
||||||
|
PyUnicode_InternInPlace(&k);
|
||||||
|
}
|
||||||
if (u->has_pairs_hook) {
|
if (u->has_pairs_hook) {
|
||||||
msgpack_unpack_object item = PyTuple_Pack(2, k, v);
|
msgpack_unpack_object item = PyTuple_Pack(2, k, v);
|
||||||
if (!item)
|
if (!item)
|
||||||
|
|
@ -227,9 +238,7 @@ static inline int unpack_callback_raw(unpack_user* u, const char* b, const char*
|
||||||
|
|
||||||
PyObject *py;
|
PyObject *py;
|
||||||
|
|
||||||
if (u->encoding) {
|
if (u->raw) {
|
||||||
py = PyUnicode_Decode(p, l, u->encoding, u->unicode_errors);
|
|
||||||
} else if (u->raw) {
|
|
||||||
py = PyBytes_FromStringAndSize(p, l);
|
py = PyBytes_FromStringAndSize(p, l);
|
||||||
} else {
|
} else {
|
||||||
py = PyUnicode_DecodeUTF8(p, l, u->unicode_errors);
|
py = PyUnicode_DecodeUTF8(p, l, u->unicode_errors);
|
||||||
|
|
@ -254,10 +263,43 @@ static inline int unpack_callback_bin(unpack_user* u, const char* b, const char*
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline int unpack_callback_ext(unpack_user* u, const char* base, const char* pos,
|
typedef struct msgpack_timestamp {
|
||||||
unsigned int length, msgpack_unpack_object* o)
|
int64_t tv_sec;
|
||||||
|
uint32_t tv_nsec;
|
||||||
|
} msgpack_timestamp;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Unpack ext buffer to a timestamp. Pulled from msgpack-c timestamp.h.
|
||||||
|
*/
|
||||||
|
static int unpack_timestamp(const char* buf, unsigned int buflen, msgpack_timestamp* ts) {
|
||||||
|
switch (buflen) {
|
||||||
|
case 4:
|
||||||
|
ts->tv_nsec = 0;
|
||||||
|
{
|
||||||
|
uint32_t v = _msgpack_load32(uint32_t, buf);
|
||||||
|
ts->tv_sec = (int64_t)v;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
case 8: {
|
||||||
|
uint64_t value =_msgpack_load64(uint64_t, buf);
|
||||||
|
ts->tv_nsec = (uint32_t)(value >> 34);
|
||||||
|
ts->tv_sec = value & 0x00000003ffffffffLL;
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
case 12:
|
||||||
|
ts->tv_nsec = _msgpack_load32(uint32_t, buf);
|
||||||
|
ts->tv_sec = _msgpack_load64(int64_t, buf + 4);
|
||||||
|
return 0;
|
||||||
|
default:
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#include "datetime.h"
|
||||||
|
|
||||||
|
static int unpack_callback_ext(unpack_user* u, const char* base, const char* pos,
|
||||||
|
unsigned int length, msgpack_unpack_object* o)
|
||||||
{
|
{
|
||||||
PyObject *py;
|
|
||||||
int8_t typecode = (int8_t)*pos++;
|
int8_t typecode = (int8_t)*pos++;
|
||||||
if (!u->ext_hook) {
|
if (!u->ext_hook) {
|
||||||
PyErr_SetString(PyExc_AssertionError, "u->ext_hook cannot be NULL");
|
PyErr_SetString(PyExc_AssertionError, "u->ext_hook cannot be NULL");
|
||||||
|
|
@ -267,12 +309,79 @@ static inline int unpack_callback_ext(unpack_user* u, const char* base, const ch
|
||||||
PyErr_Format(PyExc_ValueError, "%u exceeds max_ext_len(%zd)", length, u->max_ext_len);
|
PyErr_Format(PyExc_ValueError, "%u exceeds max_ext_len(%zd)", length, u->max_ext_len);
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PyObject *py = NULL;
|
||||||
// length also includes the typecode, so the actual data is length-1
|
// length also includes the typecode, so the actual data is length-1
|
||||||
#if PY_MAJOR_VERSION == 2
|
if (typecode == -1) {
|
||||||
py = PyObject_CallFunction(u->ext_hook, "(is#)", (int)typecode, pos, (Py_ssize_t)length-1);
|
msgpack_timestamp ts;
|
||||||
#else
|
if (unpack_timestamp(pos, length-1, &ts) < 0) {
|
||||||
py = PyObject_CallFunction(u->ext_hook, "(iy#)", (int)typecode, pos, (Py_ssize_t)length-1);
|
return -1;
|
||||||
#endif
|
}
|
||||||
|
|
||||||
|
if (u->timestamp == 2) { // int
|
||||||
|
PyObject *a = PyLong_FromLongLong(ts.tv_sec);
|
||||||
|
if (a == NULL) return -1;
|
||||||
|
|
||||||
|
PyObject *c = PyNumber_Multiply(a, u->giga);
|
||||||
|
Py_DECREF(a);
|
||||||
|
if (c == NULL) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
PyObject *b = PyLong_FromUnsignedLong(ts.tv_nsec);
|
||||||
|
if (b == NULL) {
|
||||||
|
Py_DECREF(c);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
py = PyNumber_Add(c, b);
|
||||||
|
Py_DECREF(c);
|
||||||
|
Py_DECREF(b);
|
||||||
|
}
|
||||||
|
else if (u->timestamp == 0) { // Timestamp
|
||||||
|
py = PyObject_CallFunction(u->timestamp_t, "(Lk)", ts.tv_sec, ts.tv_nsec);
|
||||||
|
}
|
||||||
|
else if (u->timestamp == 3) { // datetime
|
||||||
|
// Calculate datetime using epoch + delta
|
||||||
|
// due to limitations PyDateTime_FromTimestamp on Windows with negative timestamps
|
||||||
|
PyObject *epoch = PyDateTimeAPI->DateTime_FromDateAndTime(1970, 1, 1, 0, 0, 0, 0, u->utc, PyDateTimeAPI->DateTimeType);
|
||||||
|
if (epoch == NULL) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
PyObject* d = PyDelta_FromDSU(ts.tv_sec/(24*3600), ts.tv_sec%(24*3600), ts.tv_nsec / 1000);
|
||||||
|
if (d == NULL) {
|
||||||
|
Py_DECREF(epoch);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
py = PyNumber_Add(epoch, d);
|
||||||
|
|
||||||
|
Py_DECREF(epoch);
|
||||||
|
Py_DECREF(d);
|
||||||
|
}
|
||||||
|
else { // float
|
||||||
|
PyObject *a = PyFloat_FromDouble((double)ts.tv_nsec);
|
||||||
|
if (a == NULL) return -1;
|
||||||
|
|
||||||
|
PyObject *b = PyNumber_TrueDivide(a, u->giga);
|
||||||
|
Py_DECREF(a);
|
||||||
|
if (b == NULL) return -1;
|
||||||
|
|
||||||
|
PyObject *c = PyLong_FromLongLong(ts.tv_sec);
|
||||||
|
if (c == NULL) {
|
||||||
|
Py_DECREF(b);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
a = PyNumber_Add(b, c);
|
||||||
|
Py_DECREF(b);
|
||||||
|
Py_DECREF(c);
|
||||||
|
py = a;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
py = PyObject_CallFunction(u->ext_hook, "(iy#)", (int)typecode, pos, (Py_ssize_t)length-1);
|
||||||
|
}
|
||||||
if (!py)
|
if (!py)
|
||||||
return -1;
|
return -1;
|
||||||
*o = py;
|
*o = py;
|
||||||
|
|
|
||||||
51
msgpack/unpack_container_header.h
Normal file
51
msgpack/unpack_container_header.h
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
static inline int unpack_container_header(unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off)
|
||||||
|
{
|
||||||
|
assert(len >= *off);
|
||||||
|
uint32_t size;
|
||||||
|
const unsigned char *const p = (unsigned char*)data + *off;
|
||||||
|
|
||||||
|
#define inc_offset(inc) \
|
||||||
|
if (len - *off < inc) \
|
||||||
|
return 0; \
|
||||||
|
*off += inc;
|
||||||
|
|
||||||
|
switch (*p) {
|
||||||
|
case var_offset:
|
||||||
|
inc_offset(3);
|
||||||
|
size = _msgpack_load16(uint16_t, p + 1);
|
||||||
|
break;
|
||||||
|
case var_offset + 1:
|
||||||
|
inc_offset(5);
|
||||||
|
size = _msgpack_load32(uint32_t, p + 1);
|
||||||
|
break;
|
||||||
|
#ifdef USE_CASE_RANGE
|
||||||
|
case fixed_offset + 0x0 ... fixed_offset + 0xf:
|
||||||
|
#else
|
||||||
|
case fixed_offset + 0x0:
|
||||||
|
case fixed_offset + 0x1:
|
||||||
|
case fixed_offset + 0x2:
|
||||||
|
case fixed_offset + 0x3:
|
||||||
|
case fixed_offset + 0x4:
|
||||||
|
case fixed_offset + 0x5:
|
||||||
|
case fixed_offset + 0x6:
|
||||||
|
case fixed_offset + 0x7:
|
||||||
|
case fixed_offset + 0x8:
|
||||||
|
case fixed_offset + 0x9:
|
||||||
|
case fixed_offset + 0xa:
|
||||||
|
case fixed_offset + 0xb:
|
||||||
|
case fixed_offset + 0xc:
|
||||||
|
case fixed_offset + 0xd:
|
||||||
|
case fixed_offset + 0xe:
|
||||||
|
case fixed_offset + 0xf:
|
||||||
|
#endif
|
||||||
|
++*off;
|
||||||
|
size = ((unsigned int)*p) & 0x0f;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
PyErr_SetString(PyExc_ValueError, "Unexpected type header on stream");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
unpack_callback_uint32(&ctx->user, size, &ctx->stack[0].obj);
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
@ -75,8 +75,7 @@ static inline void unpack_clear(unpack_context *ctx)
|
||||||
Py_CLEAR(ctx->stack[0].obj);
|
Py_CLEAR(ctx->stack[0].obj);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <bool construct>
|
static inline int unpack_execute(bool construct, unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off)
|
||||||
static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off)
|
|
||||||
{
|
{
|
||||||
assert(len >= *off);
|
assert(len >= *off);
|
||||||
|
|
||||||
|
|
@ -123,7 +122,7 @@ static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize
|
||||||
goto _fixed_trail_again
|
goto _fixed_trail_again
|
||||||
|
|
||||||
#define start_container(func, count_, ct_) \
|
#define start_container(func, count_, ct_) \
|
||||||
if(top >= MSGPACK_EMBED_STACK_SIZE) { goto _failed; } /* FIXME */ \
|
if(top >= MSGPACK_EMBED_STACK_SIZE) { ret = -3; goto _end; } \
|
||||||
if(construct_cb(func)(user, count_, &stack[top].obj) < 0) { goto _failed; } \
|
if(construct_cb(func)(user, count_, &stack[top].obj) < 0) { goto _failed; } \
|
||||||
if((count_) == 0) { obj = stack[top].obj; \
|
if((count_) == 0) { obj = stack[top].obj; \
|
||||||
if (construct_cb(func##_end)(user, &obj) < 0) { goto _failed; } \
|
if (construct_cb(func##_end)(user, &obj) < 0) { goto _failed; } \
|
||||||
|
|
@ -132,27 +131,6 @@ static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize
|
||||||
stack[top].size = count_; \
|
stack[top].size = count_; \
|
||||||
stack[top].count = 0; \
|
stack[top].count = 0; \
|
||||||
++top; \
|
++top; \
|
||||||
/*printf("container %d count %d stack %d\n",stack[top].obj,count_,top);*/ \
|
|
||||||
/*printf("stack push %d\n", top);*/ \
|
|
||||||
/* FIXME \
|
|
||||||
if(top >= stack_size) { \
|
|
||||||
if(stack_size == MSGPACK_EMBED_STACK_SIZE) { \
|
|
||||||
size_t csize = sizeof(unpack_stack) * MSGPACK_EMBED_STACK_SIZE; \
|
|
||||||
size_t nsize = csize * 2; \
|
|
||||||
unpack_stack* tmp = (unpack_stack*)malloc(nsize); \
|
|
||||||
if(tmp == NULL) { goto _failed; } \
|
|
||||||
memcpy(tmp, ctx->stack, csize); \
|
|
||||||
ctx->stack = stack = tmp; \
|
|
||||||
ctx->stack_size = stack_size = MSGPACK_EMBED_STACK_SIZE * 2; \
|
|
||||||
} else { \
|
|
||||||
size_t nsize = sizeof(unpack_stack) * ctx->stack_size * 2; \
|
|
||||||
unpack_stack* tmp = (unpack_stack*)realloc(ctx->stack, nsize); \
|
|
||||||
if(tmp == NULL) { goto _failed; } \
|
|
||||||
ctx->stack = stack = tmp; \
|
|
||||||
ctx->stack_size = stack_size = stack_size * 2; \
|
|
||||||
} \
|
|
||||||
} \
|
|
||||||
*/ \
|
|
||||||
goto _header_again
|
goto _header_again
|
||||||
|
|
||||||
#define NEXT_CS(p) ((unsigned int)*p & 0x1f)
|
#define NEXT_CS(p) ((unsigned int)*p & 0x1f)
|
||||||
|
|
@ -229,7 +207,8 @@ static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize
|
||||||
case 0xdf: // map 32
|
case 0xdf: // map 32
|
||||||
again_fixed_trail(NEXT_CS(p), 2 << (((unsigned int)*p) & 0x01));
|
again_fixed_trail(NEXT_CS(p), 2 << (((unsigned int)*p) & 0x01));
|
||||||
default:
|
default:
|
||||||
goto _failed;
|
ret = -2;
|
||||||
|
goto _end;
|
||||||
}
|
}
|
||||||
SWITCH_RANGE(0xa0, 0xbf) // FixRaw
|
SWITCH_RANGE(0xa0, 0xbf) // FixRaw
|
||||||
again_fixed_trail_if_zero(ACS_RAW_VALUE, ((unsigned int)*p & 0x1f), _raw_zero);
|
again_fixed_trail_if_zero(ACS_RAW_VALUE, ((unsigned int)*p & 0x1f), _raw_zero);
|
||||||
|
|
@ -239,7 +218,8 @@ static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize
|
||||||
start_container(_map, ((unsigned int)*p) & 0x0f, CT_MAP_KEY);
|
start_container(_map, ((unsigned int)*p) & 0x0f, CT_MAP_KEY);
|
||||||
|
|
||||||
SWITCH_RANGE_DEFAULT
|
SWITCH_RANGE_DEFAULT
|
||||||
goto _failed;
|
ret = -2;
|
||||||
|
goto _end;
|
||||||
SWITCH_RANGE_END
|
SWITCH_RANGE_END
|
||||||
// end CS_HEADER
|
// end CS_HEADER
|
||||||
|
|
||||||
|
|
@ -262,17 +242,21 @@ static inline int unpack_execute(unpack_context* ctx, const char* data, Py_ssize
|
||||||
_msgpack_load32(uint32_t,n)+1,
|
_msgpack_load32(uint32_t,n)+1,
|
||||||
_ext_zero);
|
_ext_zero);
|
||||||
case CS_FLOAT: {
|
case CS_FLOAT: {
|
||||||
union { uint32_t i; float f; } mem;
|
double f;
|
||||||
mem.i = _msgpack_load32(uint32_t,n);
|
#if PY_VERSION_HEX >= 0x030B00A7
|
||||||
push_fixed_value(_float, mem.f); }
|
f = PyFloat_Unpack4((const char*)n, 0);
|
||||||
case CS_DOUBLE: {
|
#else
|
||||||
union { uint64_t i; double f; } mem;
|
f = _PyFloat_Unpack4((unsigned char*)n, 0);
|
||||||
mem.i = _msgpack_load64(uint64_t,n);
|
|
||||||
#if defined(__arm__) && !(__ARM_EABI__) // arm-oabi
|
|
||||||
// https://github.com/msgpack/msgpack-perl/pull/1
|
|
||||||
mem.i = (mem.i & 0xFFFFFFFFUL) << 32UL | (mem.i >> 32UL);
|
|
||||||
#endif
|
#endif
|
||||||
push_fixed_value(_double, mem.f); }
|
push_fixed_value(_float, f); }
|
||||||
|
case CS_DOUBLE: {
|
||||||
|
double f;
|
||||||
|
#if PY_VERSION_HEX >= 0x030B00A7
|
||||||
|
f = PyFloat_Unpack8((const char*)n, 0);
|
||||||
|
#else
|
||||||
|
f = _PyFloat_Unpack8((unsigned char*)n, 0);
|
||||||
|
#endif
|
||||||
|
push_fixed_value(_double, f); }
|
||||||
case CS_UINT_8:
|
case CS_UINT_8:
|
||||||
push_fixed_value(_uint8, *(uint8_t*)n);
|
push_fixed_value(_uint8, *(uint8_t*)n);
|
||||||
case CS_UINT_16:
|
case CS_UINT_16:
|
||||||
|
|
@ -401,6 +385,7 @@ _end:
|
||||||
#undef construct_cb
|
#undef construct_cb
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#undef NEXT_CS
|
||||||
#undef SWITCH_RANGE_BEGIN
|
#undef SWITCH_RANGE_BEGIN
|
||||||
#undef SWITCH_RANGE
|
#undef SWITCH_RANGE
|
||||||
#undef SWITCH_RANGE_DEFAULT
|
#undef SWITCH_RANGE_DEFAULT
|
||||||
|
|
@ -412,68 +397,27 @@ _end:
|
||||||
#undef again_fixed_trail_if_zero
|
#undef again_fixed_trail_if_zero
|
||||||
#undef start_container
|
#undef start_container
|
||||||
|
|
||||||
template <unsigned int fixed_offset, unsigned int var_offset>
|
static int unpack_construct(unpack_context *ctx, const char *data, Py_ssize_t len, Py_ssize_t *off) {
|
||||||
static inline int unpack_container_header(unpack_context* ctx, const char* data, Py_ssize_t len, Py_ssize_t* off)
|
return unpack_execute(1, ctx, data, len, off);
|
||||||
{
|
}
|
||||||
assert(len >= *off);
|
static int unpack_skip(unpack_context *ctx, const char *data, Py_ssize_t len, Py_ssize_t *off) {
|
||||||
uint32_t size;
|
return unpack_execute(0, ctx, data, len, off);
|
||||||
const unsigned char *const p = (unsigned char*)data + *off;
|
|
||||||
|
|
||||||
#define inc_offset(inc) \
|
|
||||||
if (len - *off < inc) \
|
|
||||||
return 0; \
|
|
||||||
*off += inc;
|
|
||||||
|
|
||||||
switch (*p) {
|
|
||||||
case var_offset:
|
|
||||||
inc_offset(3);
|
|
||||||
size = _msgpack_load16(uint16_t, p + 1);
|
|
||||||
break;
|
|
||||||
case var_offset + 1:
|
|
||||||
inc_offset(5);
|
|
||||||
size = _msgpack_load32(uint32_t, p + 1);
|
|
||||||
break;
|
|
||||||
#ifdef USE_CASE_RANGE
|
|
||||||
case fixed_offset + 0x0 ... fixed_offset + 0xf:
|
|
||||||
#else
|
|
||||||
case fixed_offset + 0x0:
|
|
||||||
case fixed_offset + 0x1:
|
|
||||||
case fixed_offset + 0x2:
|
|
||||||
case fixed_offset + 0x3:
|
|
||||||
case fixed_offset + 0x4:
|
|
||||||
case fixed_offset + 0x5:
|
|
||||||
case fixed_offset + 0x6:
|
|
||||||
case fixed_offset + 0x7:
|
|
||||||
case fixed_offset + 0x8:
|
|
||||||
case fixed_offset + 0x9:
|
|
||||||
case fixed_offset + 0xa:
|
|
||||||
case fixed_offset + 0xb:
|
|
||||||
case fixed_offset + 0xc:
|
|
||||||
case fixed_offset + 0xd:
|
|
||||||
case fixed_offset + 0xe:
|
|
||||||
case fixed_offset + 0xf:
|
|
||||||
#endif
|
|
||||||
++*off;
|
|
||||||
size = ((unsigned int)*p) & 0x0f;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
PyErr_SetString(PyExc_ValueError, "Unexpected type header on stream");
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
unpack_callback_uint32(&ctx->user, size, &ctx->stack[0].obj);
|
|
||||||
return 1;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#undef SWITCH_RANGE_BEGIN
|
#define unpack_container_header read_array_header
|
||||||
#undef SWITCH_RANGE
|
#define fixed_offset 0x90
|
||||||
#undef SWITCH_RANGE_DEFAULT
|
#define var_offset 0xdc
|
||||||
#undef SWITCH_RANGE_END
|
#include "unpack_container_header.h"
|
||||||
|
#undef unpack_container_header
|
||||||
|
#undef fixed_offset
|
||||||
|
#undef var_offset
|
||||||
|
|
||||||
static const execute_fn unpack_construct = &unpack_execute<true>;
|
#define unpack_container_header read_map_header
|
||||||
static const execute_fn unpack_skip = &unpack_execute<false>;
|
#define fixed_offset 0x80
|
||||||
static const execute_fn read_array_header = &unpack_container_header<0x90, 0xdc>;
|
#define var_offset 0xde
|
||||||
static const execute_fn read_map_header = &unpack_container_header<0x80, 0xde>;
|
#include "unpack_container_header.h"
|
||||||
|
#undef unpack_container_header
|
||||||
#undef NEXT_CS
|
#undef fixed_offset
|
||||||
|
#undef var_offset
|
||||||
|
|
||||||
/* vim: set ts=4 sw=4 sts=4 expandtab */
|
/* vim: set ts=4 sw=4 sts=4 expandtab */
|
||||||
|
|
|
||||||
45
pyproject.toml
Normal file
45
pyproject.toml
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools >= 78.1.1"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "msgpack"
|
||||||
|
dynamic = ["version"]
|
||||||
|
license = "Apache-2.0"
|
||||||
|
authors = [{name="Inada Naoki", email="songofacandy@gmail.com"}]
|
||||||
|
description = "MessagePack serializer"
|
||||||
|
readme = "README.md"
|
||||||
|
keywords = ["msgpack", "messagepack", "serializer", "serialization", "binary"]
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 5 - Production/Stable",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
"Topic :: File Formats",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://msgpack.org/"
|
||||||
|
Documentation = "https://msgpack-python.readthedocs.io/"
|
||||||
|
Repository = "https://github.com/msgpack/msgpack-python/"
|
||||||
|
Tracker = "https://github.com/msgpack/msgpack-python/issues"
|
||||||
|
Changelog = "https://github.com/msgpack/msgpack-python/blob/main/ChangeLog.rst"
|
||||||
|
|
||||||
|
[tool.setuptools]
|
||||||
|
# Do not install C/C++/Cython source files
|
||||||
|
include-package-data = false
|
||||||
|
|
||||||
|
[tool.setuptools.dynamic]
|
||||||
|
version = {attr = "msgpack.__version__"}
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 100
|
||||||
|
target-version = "py310"
|
||||||
|
lint.select = [
|
||||||
|
"E", # pycodestyle
|
||||||
|
"F", # Pyflakes
|
||||||
|
"I", # isort
|
||||||
|
#"UP", pyupgrade
|
||||||
|
]
|
||||||
|
|
@ -1 +1,3 @@
|
||||||
Cython==0.27.3
|
Cython==3.2.1
|
||||||
|
setuptools==78.1.1
|
||||||
|
build
|
||||||
|
|
|
||||||
147
setup.py
Executable file → Normal file
147
setup.py
Executable file → Normal file
|
|
@ -1,137 +1,32 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
import io
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from glob import glob
|
|
||||||
from distutils.command.sdist import sdist
|
|
||||||
from setuptools import setup, Extension
|
|
||||||
|
|
||||||
from distutils.command.build_ext import build_ext
|
from setuptools import Extension, setup
|
||||||
|
|
||||||
# for building transitional package.
|
PYPY = hasattr(sys, "pypy_version_info")
|
||||||
TRANSITIONAL = False
|
|
||||||
|
|
||||||
class NoCython(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
import Cython.Compiler.Main as cython_compiler
|
|
||||||
have_cython = True
|
|
||||||
except ImportError:
|
|
||||||
have_cython = False
|
|
||||||
|
|
||||||
|
|
||||||
def cythonize(src):
|
|
||||||
sys.stderr.write("cythonize: %r\n" % (src,))
|
|
||||||
cython_compiler.compile([src], cplus=True)
|
|
||||||
|
|
||||||
def ensure_source(src):
|
|
||||||
pyx = os.path.splitext(src)[0] + '.pyx'
|
|
||||||
|
|
||||||
if not os.path.exists(src):
|
|
||||||
if not have_cython:
|
|
||||||
raise NoCython
|
|
||||||
cythonize(pyx)
|
|
||||||
elif (os.path.exists(pyx) and
|
|
||||||
os.stat(src).st_mtime < os.stat(pyx).st_mtime and
|
|
||||||
have_cython):
|
|
||||||
cythonize(pyx)
|
|
||||||
return src
|
|
||||||
|
|
||||||
|
|
||||||
class BuildExt(build_ext):
|
|
||||||
def build_extension(self, ext):
|
|
||||||
try:
|
|
||||||
ext.sources = list(map(ensure_source, ext.sources))
|
|
||||||
except NoCython:
|
|
||||||
print("WARNING")
|
|
||||||
print("Cython is required for building extension from checkout.")
|
|
||||||
print("Install Cython >= 0.16 or install msgpack from PyPI.")
|
|
||||||
print("Falling back to pure Python implementation.")
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
return build_ext.build_extension(self, ext)
|
|
||||||
except Exception as e:
|
|
||||||
print("WARNING: Failed to compile extension modules.")
|
|
||||||
print("msgpack uses fallback pure python implementation.")
|
|
||||||
print(e)
|
|
||||||
|
|
||||||
|
|
||||||
exec(open('msgpack/_version.py').read())
|
|
||||||
|
|
||||||
version_str = '.'.join(str(x) for x in version[:3])
|
|
||||||
if len(version) > 3 and version[3] != 'final':
|
|
||||||
version_str += version[3]
|
|
||||||
|
|
||||||
# take care of extension modules.
|
|
||||||
if have_cython:
|
|
||||||
class Sdist(sdist):
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
for src in glob('msgpack/*.pyx'):
|
|
||||||
cythonize(src)
|
|
||||||
sdist.__init__(self, *args, **kwargs)
|
|
||||||
else:
|
|
||||||
Sdist = sdist
|
|
||||||
|
|
||||||
libraries = []
|
libraries = []
|
||||||
if sys.platform == 'win32':
|
macros = []
|
||||||
libraries.append('ws2_32')
|
|
||||||
|
|
||||||
if sys.byteorder == 'big':
|
|
||||||
macros = [('__BIG_ENDIAN__', '1')]
|
|
||||||
else:
|
|
||||||
macros = [('__LITTLE_ENDIAN__', '1')]
|
|
||||||
|
|
||||||
ext_modules = []
|
ext_modules = []
|
||||||
if not hasattr(sys, 'pypy_version_info'):
|
|
||||||
ext_modules.append(Extension('msgpack._packer',
|
if sys.platform == "win32":
|
||||||
sources=['msgpack/_packer.cpp'],
|
libraries.append("ws2_32")
|
||||||
libraries=libraries,
|
macros = [("__LITTLE_ENDIAN__", "1")]
|
||||||
include_dirs=['.'],
|
|
||||||
define_macros=macros,
|
if not PYPY and not os.environ.get("MSGPACK_PUREPYTHON"):
|
||||||
))
|
ext_modules.append(
|
||||||
ext_modules.append(Extension('msgpack._unpacker',
|
Extension(
|
||||||
sources=['msgpack/_unpacker.cpp'],
|
"msgpack._cmsgpack",
|
||||||
libraries=libraries,
|
sources=["msgpack/_cmsgpack.c"],
|
||||||
include_dirs=['.'],
|
libraries=libraries,
|
||||||
define_macros=macros,
|
include_dirs=["."],
|
||||||
))
|
define_macros=macros,
|
||||||
|
)
|
||||||
|
)
|
||||||
del libraries, macros
|
del libraries, macros
|
||||||
|
|
||||||
|
setup(
|
||||||
desc = 'MessagePack (de)serializer.'
|
ext_modules=ext_modules,
|
||||||
with io.open('README.rst', encoding='utf-8') as f:
|
packages=["msgpack"],
|
||||||
long_desc = f.read()
|
|
||||||
del f
|
|
||||||
|
|
||||||
name = 'msgpack'
|
|
||||||
|
|
||||||
if TRANSITIONAL:
|
|
||||||
name = 'msgpack-python'
|
|
||||||
long_desc = "This package is deprecated. Install msgpack instead."
|
|
||||||
|
|
||||||
setup(name=name,
|
|
||||||
author='INADA Naoki',
|
|
||||||
author_email='songofacandy@gmail.com',
|
|
||||||
version=version_str,
|
|
||||||
cmdclass={'build_ext': BuildExt, 'sdist': Sdist},
|
|
||||||
ext_modules=ext_modules,
|
|
||||||
packages=['msgpack'],
|
|
||||||
description=desc,
|
|
||||||
long_description=long_desc,
|
|
||||||
url='http://msgpack.org/',
|
|
||||||
license='Apache 2.0',
|
|
||||||
classifiers=[
|
|
||||||
'Programming Language :: Python :: 2',
|
|
||||||
'Programming Language :: Python :: 2.7',
|
|
||||||
'Programming Language :: Python :: 3',
|
|
||||||
'Programming Language :: Python :: 3.5',
|
|
||||||
'Programming Language :: Python :: 3.6',
|
|
||||||
'Programming Language :: Python :: 3.7',
|
|
||||||
'Programming Language :: Python :: Implementation :: CPython',
|
|
||||||
'Programming Language :: Python :: Implementation :: PyPy',
|
|
||||||
'Intended Audience :: Developers',
|
|
||||||
'License :: OSI Approved :: Apache Software License',
|
|
||||||
],
|
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,29 +1,49 @@
|
||||||
#!/usr/bin/env python
|
from pytest import raises
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from msgpack import packb, unpackb
|
from msgpack import Packer, packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_buffer():
|
def test_unpack_buffer():
|
||||||
from array import array
|
from array import array
|
||||||
buf = array('b')
|
|
||||||
buf.fromstring(packb((b'foo', b'bar')))
|
buf = array("b")
|
||||||
|
buf.frombytes(packb((b"foo", b"bar")))
|
||||||
obj = unpackb(buf, use_list=1)
|
obj = unpackb(buf, use_list=1)
|
||||||
assert [b'foo', b'bar'] == obj
|
assert [b"foo", b"bar"] == obj
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_bytearray():
|
def test_unpack_bytearray():
|
||||||
buf = bytearray(packb(('foo', 'bar')))
|
buf = bytearray(packb((b"foo", b"bar")))
|
||||||
obj = unpackb(buf, use_list=1)
|
obj = unpackb(buf, use_list=1)
|
||||||
assert [b'foo', b'bar'] == obj
|
assert [b"foo", b"bar"] == obj
|
||||||
expected_type = bytes
|
expected_type = bytes
|
||||||
assert all(type(s) == expected_type for s in obj)
|
assert all(type(s) is expected_type for s in obj)
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_memoryview():
|
def test_unpack_memoryview():
|
||||||
buf = bytearray(packb(('foo', 'bar')))
|
buf = bytearray(packb((b"foo", b"bar")))
|
||||||
view = memoryview(buf)
|
view = memoryview(buf)
|
||||||
obj = unpackb(view, use_list=1)
|
obj = unpackb(view, use_list=1)
|
||||||
assert [b'foo', b'bar'] == obj
|
assert [b"foo", b"bar"] == obj
|
||||||
expected_type = bytes
|
expected_type = bytes
|
||||||
assert all(type(s) == expected_type for s in obj)
|
assert all(type(s) is expected_type for s in obj)
|
||||||
|
|
||||||
|
|
||||||
|
def test_packer_getbuffer():
|
||||||
|
packer = Packer(autoreset=False)
|
||||||
|
packer.pack_array_header(2)
|
||||||
|
packer.pack(42)
|
||||||
|
packer.pack("hello")
|
||||||
|
buffer = packer.getbuffer()
|
||||||
|
assert isinstance(buffer, memoryview)
|
||||||
|
assert bytes(buffer) == b"\x92*\xa5hello"
|
||||||
|
|
||||||
|
if Packer.__module__ == "msgpack._cmsgpack": # only for Cython
|
||||||
|
# cython Packer supports buffer protocol directly
|
||||||
|
assert bytes(packer) == b"\x92*\xa5hello"
|
||||||
|
|
||||||
|
with raises(BufferError):
|
||||||
|
packer.pack(42)
|
||||||
|
buffer.release()
|
||||||
|
packer.pack(42)
|
||||||
|
assert bytes(packer) == b"\x92*\xa5hello*"
|
||||||
|
|
|
||||||
|
|
@ -1,102 +1,136 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from msgpack import packb, unpackb
|
from msgpack import packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
def check(length, obj):
|
def check(length, obj, use_bin_type=True):
|
||||||
v = packb(obj)
|
v = packb(obj, use_bin_type=use_bin_type)
|
||||||
assert len(v) == length, \
|
assert len(v) == length, f"{obj!r} length should be {length!r} but get {len(v)!r}"
|
||||||
"%r length should be %r but get %r" % (obj, length, len(v))
|
assert unpackb(v, use_list=0, raw=not use_bin_type) == obj
|
||||||
assert unpackb(v, use_list=0) == obj
|
|
||||||
|
|
||||||
def test_1():
|
def test_1():
|
||||||
for o in [None, True, False, 0, 1, (1 << 6), (1 << 7) - 1, -1,
|
for o in [
|
||||||
-((1<<5)-1), -(1<<5)]:
|
None,
|
||||||
|
True,
|
||||||
|
False,
|
||||||
|
0,
|
||||||
|
1,
|
||||||
|
(1 << 6),
|
||||||
|
(1 << 7) - 1,
|
||||||
|
-1,
|
||||||
|
-((1 << 5) - 1),
|
||||||
|
-(1 << 5),
|
||||||
|
]:
|
||||||
check(1, o)
|
check(1, o)
|
||||||
|
|
||||||
|
|
||||||
def test_2():
|
def test_2():
|
||||||
for o in [1 << 7, (1 << 8) - 1,
|
for o in [1 << 7, (1 << 8) - 1, -((1 << 5) + 1), -(1 << 7)]:
|
||||||
-((1<<5)+1), -(1<<7)
|
|
||||||
]:
|
|
||||||
check(2, o)
|
check(2, o)
|
||||||
|
|
||||||
|
|
||||||
def test_3():
|
def test_3():
|
||||||
for o in [1 << 8, (1 << 16) - 1,
|
for o in [1 << 8, (1 << 16) - 1, -((1 << 7) + 1), -(1 << 15)]:
|
||||||
-((1<<7)+1), -(1<<15)]:
|
|
||||||
check(3, o)
|
check(3, o)
|
||||||
|
|
||||||
|
|
||||||
def test_5():
|
def test_5():
|
||||||
for o in [1 << 16, (1 << 32) - 1,
|
for o in [1 << 16, (1 << 32) - 1, -((1 << 15) + 1), -(1 << 31)]:
|
||||||
-((1<<15)+1), -(1<<31)]:
|
|
||||||
check(5, o)
|
check(5, o)
|
||||||
|
|
||||||
|
|
||||||
def test_9():
|
def test_9():
|
||||||
for o in [1 << 32, (1 << 64) - 1,
|
for o in [
|
||||||
-((1<<31)+1), -(1<<63),
|
1 << 32,
|
||||||
1.0, 0.1, -0.1, -1.0]:
|
(1 << 64) - 1,
|
||||||
|
-((1 << 31) + 1),
|
||||||
|
-(1 << 63),
|
||||||
|
1.0,
|
||||||
|
0.1,
|
||||||
|
-0.1,
|
||||||
|
-1.0,
|
||||||
|
]:
|
||||||
check(9, o)
|
check(9, o)
|
||||||
|
|
||||||
|
|
||||||
def check_raw(overhead, num):
|
def check_raw(overhead, num):
|
||||||
check(num + overhead, b" " * num)
|
check(num + overhead, b" " * num, use_bin_type=False)
|
||||||
|
|
||||||
|
|
||||||
def test_fixraw():
|
def test_fixraw():
|
||||||
check_raw(1, 0)
|
check_raw(1, 0)
|
||||||
check_raw(1, (1<<5) - 1)
|
check_raw(1, (1 << 5) - 1)
|
||||||
|
|
||||||
|
|
||||||
def test_raw16():
|
def test_raw16():
|
||||||
check_raw(3, 1<<5)
|
check_raw(3, 1 << 5)
|
||||||
check_raw(3, (1<<16) - 1)
|
check_raw(3, (1 << 16) - 1)
|
||||||
|
|
||||||
|
|
||||||
def test_raw32():
|
def test_raw32():
|
||||||
check_raw(5, 1<<16)
|
check_raw(5, 1 << 16)
|
||||||
|
|
||||||
|
|
||||||
def check_array(overhead, num):
|
def check_array(overhead, num):
|
||||||
check(num + overhead, (None,) * num)
|
check(num + overhead, (None,) * num)
|
||||||
|
|
||||||
|
|
||||||
def test_fixarray():
|
def test_fixarray():
|
||||||
check_array(1, 0)
|
check_array(1, 0)
|
||||||
check_array(1, (1 << 4) - 1)
|
check_array(1, (1 << 4) - 1)
|
||||||
|
|
||||||
|
|
||||||
def test_array16():
|
def test_array16():
|
||||||
check_array(3, 1 << 4)
|
check_array(3, 1 << 4)
|
||||||
check_array(3, (1<<16)-1)
|
check_array(3, (1 << 16) - 1)
|
||||||
|
|
||||||
|
|
||||||
def test_array32():
|
def test_array32():
|
||||||
check_array(5, (1<<16))
|
check_array(5, (1 << 16))
|
||||||
|
|
||||||
|
|
||||||
def match(obj, buf):
|
def match(obj, buf):
|
||||||
assert packb(obj) == buf
|
assert packb(obj) == buf
|
||||||
assert unpackb(buf, use_list=0) == obj
|
assert unpackb(buf, use_list=0, strict_map_key=False) == obj
|
||||||
|
|
||||||
|
|
||||||
def test_match():
|
def test_match():
|
||||||
cases = [
|
cases = [
|
||||||
(None, b'\xc0'),
|
(None, b"\xc0"),
|
||||||
(False, b'\xc2'),
|
(False, b"\xc2"),
|
||||||
(True, b'\xc3'),
|
(True, b"\xc3"),
|
||||||
(0, b'\x00'),
|
(0, b"\x00"),
|
||||||
(127, b'\x7f'),
|
(127, b"\x7f"),
|
||||||
(128, b'\xcc\x80'),
|
(128, b"\xcc\x80"),
|
||||||
(256, b'\xcd\x01\x00'),
|
(256, b"\xcd\x01\x00"),
|
||||||
(-1, b'\xff'),
|
(-1, b"\xff"),
|
||||||
(-33, b'\xd0\xdf'),
|
(-33, b"\xd0\xdf"),
|
||||||
(-129, b'\xd1\xff\x7f'),
|
(-129, b"\xd1\xff\x7f"),
|
||||||
({1:1}, b'\x81\x01\x01'),
|
({1: 1}, b"\x81\x01\x01"),
|
||||||
(1.0, b"\xcb\x3f\xf0\x00\x00\x00\x00\x00\x00"),
|
(1.0, b"\xcb\x3f\xf0\x00\x00\x00\x00\x00\x00"),
|
||||||
((), b'\x90'),
|
((), b"\x90"),
|
||||||
(tuple(range(15)),b"\x9f\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e"),
|
(
|
||||||
(tuple(range(16)),b"\xdc\x00\x10\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"),
|
tuple(range(15)),
|
||||||
({}, b'\x80'),
|
b"\x9f\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e",
|
||||||
(dict([(x,x) for x in range(15)]), b'\x8f\x00\x00\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x07\x08\x08\t\t\n\n\x0b\x0b\x0c\x0c\r\r\x0e\x0e'),
|
),
|
||||||
(dict([(x,x) for x in range(16)]), b'\xde\x00\x10\x00\x00\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x07\x08\x08\t\t\n\n\x0b\x0b\x0c\x0c\r\r\x0e\x0e\x0f\x0f'),
|
(
|
||||||
]
|
tuple(range(16)),
|
||||||
|
b"\xdc\x00\x10\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
|
||||||
|
),
|
||||||
|
({}, b"\x80"),
|
||||||
|
(
|
||||||
|
{x: x for x in range(15)},
|
||||||
|
b"\x8f\x00\x00\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x07\x08\x08\t\t\n\n\x0b\x0b\x0c\x0c\r\r\x0e\x0e",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
{x: x for x in range(16)},
|
||||||
|
b"\xde\x00\x10\x00\x00\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x07\x08\x08\t\t\n\n\x0b\x0b\x0c\x0c\r\r\x0e\x0e\x0f\x0f",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
for v, p in cases:
|
for v, p in cases:
|
||||||
match(v, p)
|
match(v, p)
|
||||||
|
|
||||||
def test_unicode():
|
|
||||||
assert unpackb(packb('foobar'), use_list=1) == b'foobar'
|
|
||||||
|
|
||||||
|
def test_unicode():
|
||||||
|
assert unpackb(packb("foobar"), use_list=1) == "foobar"
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,11 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from pytest import raises
|
|
||||||
from msgpack import packb, unpackb
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
|
from pytest import raises
|
||||||
|
|
||||||
|
from msgpack import FormatError, OutOfData, StackError, Unpacker, packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
class DummyException(Exception):
|
class DummyException(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
@ -19,13 +19,45 @@ def test_raise_on_find_unsupported_value():
|
||||||
def test_raise_from_object_hook():
|
def test_raise_from_object_hook():
|
||||||
def hook(obj):
|
def hook(obj):
|
||||||
raise DummyException
|
raise DummyException
|
||||||
|
|
||||||
raises(DummyException, unpackb, packb({}), object_hook=hook)
|
raises(DummyException, unpackb, packb({}), object_hook=hook)
|
||||||
raises(DummyException, unpackb, packb({'fizz': 'buzz'}), object_hook=hook)
|
raises(DummyException, unpackb, packb({"fizz": "buzz"}), object_hook=hook)
|
||||||
raises(DummyException, unpackb, packb({'fizz': 'buzz'}), object_pairs_hook=hook)
|
raises(DummyException, unpackb, packb({"fizz": "buzz"}), object_pairs_hook=hook)
|
||||||
raises(DummyException, unpackb, packb({'fizz': {'buzz': 'spam'}}), object_hook=hook)
|
raises(DummyException, unpackb, packb({"fizz": {"buzz": "spam"}}), object_hook=hook)
|
||||||
raises(DummyException, unpackb, packb({'fizz': {'buzz': 'spam'}}), object_pairs_hook=hook)
|
raises(
|
||||||
|
DummyException,
|
||||||
|
unpackb,
|
||||||
|
packb({"fizz": {"buzz": "spam"}}),
|
||||||
|
object_pairs_hook=hook,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_invalidvalue():
|
def test_invalidvalue():
|
||||||
|
incomplete = b"\xd9\x97#DL_" # raw8 - length=0x97
|
||||||
with raises(ValueError):
|
with raises(ValueError):
|
||||||
unpackb(b'\xd9\x97#DL_')
|
unpackb(incomplete)
|
||||||
|
|
||||||
|
with raises(OutOfData):
|
||||||
|
unpacker = Unpacker()
|
||||||
|
unpacker.feed(incomplete)
|
||||||
|
unpacker.unpack()
|
||||||
|
|
||||||
|
with raises(FormatError):
|
||||||
|
unpackb(b"\xc1") # (undefined tag)
|
||||||
|
|
||||||
|
with raises(FormatError):
|
||||||
|
unpackb(b"\x91\xc1") # fixarray(len=1) [ (undefined tag) ]
|
||||||
|
|
||||||
|
with raises(StackError):
|
||||||
|
unpackb(b"\x91" * 3000) # nested fixarray(len=1)
|
||||||
|
|
||||||
|
|
||||||
|
def test_strict_map_key():
|
||||||
|
valid = {"unicode": 1, b"bytes": 2}
|
||||||
|
packed = packb(valid, use_bin_type=True)
|
||||||
|
assert valid == unpackb(packed, raw=False, strict_map_key=True)
|
||||||
|
|
||||||
|
invalid = {42: 1}
|
||||||
|
packed = packb(invalid, use_bin_type=True)
|
||||||
|
with raises(ValueError):
|
||||||
|
unpackb(packed, raw=False, strict_map_key=True)
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
from __future__ import print_function
|
|
||||||
import array
|
import array
|
||||||
|
|
||||||
import msgpack
|
import msgpack
|
||||||
from msgpack import ExtType
|
from msgpack import ExtType
|
||||||
|
|
||||||
|
|
@ -9,65 +9,67 @@ def test_pack_ext_type():
|
||||||
packer = msgpack.Packer()
|
packer = msgpack.Packer()
|
||||||
packer.pack_ext_type(0x42, s)
|
packer.pack_ext_type(0x42, s)
|
||||||
return packer.bytes()
|
return packer.bytes()
|
||||||
assert p(b'A') == b'\xd4\x42A' # fixext 1
|
|
||||||
assert p(b'AB') == b'\xd5\x42AB' # fixext 2
|
assert p(b"A") == b"\xd4\x42A" # fixext 1
|
||||||
assert p(b'ABCD') == b'\xd6\x42ABCD' # fixext 4
|
assert p(b"AB") == b"\xd5\x42AB" # fixext 2
|
||||||
assert p(b'ABCDEFGH') == b'\xd7\x42ABCDEFGH' # fixext 8
|
assert p(b"ABCD") == b"\xd6\x42ABCD" # fixext 4
|
||||||
assert p(b'A'*16) == b'\xd8\x42' + b'A'*16 # fixext 16
|
assert p(b"ABCDEFGH") == b"\xd7\x42ABCDEFGH" # fixext 8
|
||||||
assert p(b'ABC') == b'\xc7\x03\x42ABC' # ext 8
|
assert p(b"A" * 16) == b"\xd8\x42" + b"A" * 16 # fixext 16
|
||||||
assert p(b'A'*0x0123) == b'\xc8\x01\x23\x42' + b'A'*0x0123 # ext 16
|
assert p(b"ABC") == b"\xc7\x03\x42ABC" # ext 8
|
||||||
assert p(b'A'*0x00012345) == b'\xc9\x00\x01\x23\x45\x42' + b'A'*0x00012345 # ext 32
|
assert p(b"A" * 0x0123) == b"\xc8\x01\x23\x42" + b"A" * 0x0123 # ext 16
|
||||||
|
assert p(b"A" * 0x00012345) == b"\xc9\x00\x01\x23\x45\x42" + b"A" * 0x00012345 # ext 32
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_ext_type():
|
def test_unpack_ext_type():
|
||||||
def check(b, expected):
|
def check(b, expected):
|
||||||
assert msgpack.unpackb(b) == expected
|
assert msgpack.unpackb(b) == expected
|
||||||
|
|
||||||
check(b'\xd4\x42A', ExtType(0x42, b'A')) # fixext 1
|
check(b"\xd4\x42A", ExtType(0x42, b"A")) # fixext 1
|
||||||
check(b'\xd5\x42AB', ExtType(0x42, b'AB')) # fixext 2
|
check(b"\xd5\x42AB", ExtType(0x42, b"AB")) # fixext 2
|
||||||
check(b'\xd6\x42ABCD', ExtType(0x42, b'ABCD')) # fixext 4
|
check(b"\xd6\x42ABCD", ExtType(0x42, b"ABCD")) # fixext 4
|
||||||
check(b'\xd7\x42ABCDEFGH', ExtType(0x42, b'ABCDEFGH')) # fixext 8
|
check(b"\xd7\x42ABCDEFGH", ExtType(0x42, b"ABCDEFGH")) # fixext 8
|
||||||
check(b'\xd8\x42' + b'A'*16, ExtType(0x42, b'A'*16)) # fixext 16
|
check(b"\xd8\x42" + b"A" * 16, ExtType(0x42, b"A" * 16)) # fixext 16
|
||||||
check(b'\xc7\x03\x42ABC', ExtType(0x42, b'ABC')) # ext 8
|
check(b"\xc7\x03\x42ABC", ExtType(0x42, b"ABC")) # ext 8
|
||||||
check(b'\xc8\x01\x23\x42' + b'A'*0x0123,
|
check(b"\xc8\x01\x23\x42" + b"A" * 0x0123, ExtType(0x42, b"A" * 0x0123)) # ext 16
|
||||||
ExtType(0x42, b'A'*0x0123)) # ext 16
|
check(
|
||||||
check(b'\xc9\x00\x01\x23\x45\x42' + b'A'*0x00012345,
|
b"\xc9\x00\x01\x23\x45\x42" + b"A" * 0x00012345,
|
||||||
ExtType(0x42, b'A'*0x00012345)) # ext 32
|
ExtType(0x42, b"A" * 0x00012345),
|
||||||
|
) # ext 32
|
||||||
|
|
||||||
|
|
||||||
def test_extension_type():
|
def test_extension_type():
|
||||||
def default(obj):
|
def default(obj):
|
||||||
print('default called', obj)
|
print("default called", obj)
|
||||||
if isinstance(obj, array.array):
|
if isinstance(obj, array.array):
|
||||||
typecode = 123 # application specific typecode
|
typecode = 123 # application specific typecode
|
||||||
data = obj.tostring()
|
try:
|
||||||
|
data = obj.tobytes()
|
||||||
|
except AttributeError:
|
||||||
|
data = obj.tostring()
|
||||||
return ExtType(typecode, data)
|
return ExtType(typecode, data)
|
||||||
raise TypeError("Unknown type object %r" % (obj,))
|
raise TypeError(f"Unknown type object {obj!r}")
|
||||||
|
|
||||||
def ext_hook(code, data):
|
def ext_hook(code, data):
|
||||||
print('ext_hook called', code, data)
|
print("ext_hook called", code, data)
|
||||||
assert code == 123
|
assert code == 123
|
||||||
obj = array.array('d')
|
obj = array.array("d")
|
||||||
obj.fromstring(data)
|
obj.frombytes(data)
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
obj = [42, b'hello', array.array('d', [1.1, 2.2, 3.3])]
|
obj = [42, b"hello", array.array("d", [1.1, 2.2, 3.3])]
|
||||||
s = msgpack.packb(obj, default=default)
|
s = msgpack.packb(obj, default=default)
|
||||||
obj2 = msgpack.unpackb(s, ext_hook=ext_hook)
|
obj2 = msgpack.unpackb(s, ext_hook=ext_hook)
|
||||||
assert obj == obj2
|
assert obj == obj2
|
||||||
|
|
||||||
import sys
|
|
||||||
if sys.version > '3':
|
|
||||||
long = int
|
|
||||||
|
|
||||||
def test_overriding_hooks():
|
def test_overriding_hooks():
|
||||||
def default(obj):
|
def default(obj):
|
||||||
if isinstance(obj, long):
|
if isinstance(obj, int):
|
||||||
return {"__type__": "long", "__data__": str(obj)}
|
return {"__type__": "long", "__data__": str(obj)}
|
||||||
else:
|
else:
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
obj = {"testval": long(1823746192837461928374619)}
|
obj = {"testval": 1823746192837461928374619}
|
||||||
refobj = {"testval": default(obj["testval"])}
|
refobj = {"testval": default(obj["testval"])}
|
||||||
refout = msgpack.packb(refobj)
|
refout = msgpack.packb(refobj)
|
||||||
assert isinstance(refout, (str, bytes))
|
assert isinstance(refout, (str, bytes))
|
||||||
|
|
|
||||||
|
|
@ -1,70 +1,88 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from msgpack import unpackb
|
from msgpack import unpackb
|
||||||
|
|
||||||
def check(src, should, use_list=0):
|
|
||||||
assert unpackb(src, use_list=use_list) == should
|
def check(src, should, use_list=0, raw=True):
|
||||||
|
assert unpackb(src, use_list=use_list, raw=raw, strict_map_key=False) == should
|
||||||
|
|
||||||
|
|
||||||
def testSimpleValue():
|
def testSimpleValue():
|
||||||
check(b"\x93\xc0\xc2\xc3",
|
check(b"\x93\xc0\xc2\xc3", (None, False, True))
|
||||||
(None, False, True,))
|
|
||||||
|
|
||||||
def testFixnum():
|
def testFixnum():
|
||||||
check(b"\x92\x93\x00\x40\x7f\x93\xe0\xf0\xff",
|
check(b"\x92\x93\x00\x40\x7f\x93\xe0\xf0\xff", ((0, 64, 127), (-32, -16, -1)))
|
||||||
((0,64,127,), (-32,-16,-1,),)
|
|
||||||
)
|
|
||||||
|
|
||||||
def testFixArray():
|
def testFixArray():
|
||||||
check(b"\x92\x90\x91\x91\xc0",
|
check(b"\x92\x90\x91\x91\xc0", ((), ((None,),)))
|
||||||
((),((None,),),),
|
|
||||||
)
|
|
||||||
|
|
||||||
def testFixRaw():
|
def testFixRaw():
|
||||||
check(b"\x94\xa0\xa1a\xa2bc\xa3def",
|
check(b"\x94\xa0\xa1a\xa2bc\xa3def", (b"", b"a", b"bc", b"def"))
|
||||||
(b"", b"a", b"bc", b"def",),
|
|
||||||
)
|
|
||||||
|
|
||||||
def testFixMap():
|
def testFixMap():
|
||||||
check(
|
check(b"\x82\xc2\x81\xc0\xc0\xc3\x81\xc0\x80", {False: {None: None}, True: {None: {}}})
|
||||||
b"\x82\xc2\x81\xc0\xc0\xc3\x81\xc0\x80",
|
|
||||||
{False: {None: None}, True:{None:{}}},
|
|
||||||
)
|
|
||||||
|
|
||||||
def testUnsignedInt():
|
def testUnsignedInt():
|
||||||
check(
|
check(
|
||||||
b"\x99\xcc\x00\xcc\x80\xcc\xff\xcd\x00\x00\xcd\x80\x00"
|
b"\x99\xcc\x00\xcc\x80\xcc\xff\xcd\x00\x00\xcd\x80\x00"
|
||||||
b"\xcd\xff\xff\xce\x00\x00\x00\x00\xce\x80\x00\x00\x00"
|
b"\xcd\xff\xff\xce\x00\x00\x00\x00\xce\x80\x00\x00\x00"
|
||||||
b"\xce\xff\xff\xff\xff",
|
b"\xce\xff\xff\xff\xff",
|
||||||
(0, 128, 255, 0, 32768, 65535, 0, 2147483648, 4294967295,),
|
(0, 128, 255, 0, 32768, 65535, 0, 2147483648, 4294967295),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def testSignedInt():
|
def testSignedInt():
|
||||||
check(b"\x99\xd0\x00\xd0\x80\xd0\xff\xd1\x00\x00\xd1\x80\x00"
|
check(
|
||||||
b"\xd1\xff\xff\xd2\x00\x00\x00\x00\xd2\x80\x00\x00\x00"
|
b"\x99\xd0\x00\xd0\x80\xd0\xff\xd1\x00\x00\xd1\x80\x00"
|
||||||
b"\xd2\xff\xff\xff\xff",
|
b"\xd1\xff\xff\xd2\x00\x00\x00\x00\xd2\x80\x00\x00\x00"
|
||||||
(0, -128, -1, 0, -32768, -1, 0, -2147483648, -1,))
|
b"\xd2\xff\xff\xff\xff",
|
||||||
|
(0, -128, -1, 0, -32768, -1, 0, -2147483648, -1),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def testRaw():
|
def testRaw():
|
||||||
check(b"\x96\xda\x00\x00\xda\x00\x01a\xda\x00\x02ab\xdb\x00\x00"
|
check(
|
||||||
|
b"\x96\xda\x00\x00\xda\x00\x01a\xda\x00\x02ab\xdb\x00\x00"
|
||||||
b"\x00\x00\xdb\x00\x00\x00\x01a\xdb\x00\x00\x00\x02ab",
|
b"\x00\x00\xdb\x00\x00\x00\x01a\xdb\x00\x00\x00\x02ab",
|
||||||
(b"", b"a", b"ab", b"", b"a", b"ab"))
|
(b"", b"a", b"ab", b"", b"a", b"ab"),
|
||||||
|
)
|
||||||
|
check(
|
||||||
|
b"\x96\xda\x00\x00\xda\x00\x01a\xda\x00\x02ab\xdb\x00\x00"
|
||||||
|
b"\x00\x00\xdb\x00\x00\x00\x01a\xdb\x00\x00\x00\x02ab",
|
||||||
|
("", "a", "ab", "", "a", "ab"),
|
||||||
|
raw=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def testArray():
|
def testArray():
|
||||||
check(b"\x96\xdc\x00\x00\xdc\x00\x01\xc0\xdc\x00\x02\xc2\xc3\xdd\x00"
|
check(
|
||||||
|
b"\x96\xdc\x00\x00\xdc\x00\x01\xc0\xdc\x00\x02\xc2\xc3\xdd\x00"
|
||||||
b"\x00\x00\x00\xdd\x00\x00\x00\x01\xc0\xdd\x00\x00\x00\x02"
|
b"\x00\x00\x00\xdd\x00\x00\x00\x01\xc0\xdd\x00\x00\x00\x02"
|
||||||
b"\xc2\xc3",
|
b"\xc2\xc3",
|
||||||
((), (None,), (False,True), (), (None,), (False,True))
|
((), (None,), (False, True), (), (None,), (False, True)),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def testMap():
|
def testMap():
|
||||||
check(
|
check(
|
||||||
b"\x96"
|
b"\x96"
|
||||||
b"\xde\x00\x00"
|
b"\xde\x00\x00"
|
||||||
b"\xde\x00\x01\xc0\xc2"
|
b"\xde\x00\x01\xc0\xc2"
|
||||||
b"\xde\x00\x02\xc0\xc2\xc3\xc2"
|
b"\xde\x00\x02\xc0\xc2\xc3\xc2"
|
||||||
b"\xdf\x00\x00\x00\x00"
|
b"\xdf\x00\x00\x00\x00"
|
||||||
b"\xdf\x00\x00\x00\x01\xc0\xc2"
|
b"\xdf\x00\x00\x00\x01\xc0\xc2"
|
||||||
b"\xdf\x00\x00\x00\x02\xc0\xc2\xc3\xc2",
|
b"\xdf\x00\x00\x00\x02\xc0\xc2\xc3\xc2",
|
||||||
({}, {None: False}, {True: False, None: False}, {},
|
(
|
||||||
{None: False}, {True: False, None: False}))
|
{},
|
||||||
|
{None: False},
|
||||||
|
{True: False, None: False},
|
||||||
|
{},
|
||||||
|
{None: False},
|
||||||
|
{True: False, None: False},
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,42 +1,46 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
from __future__ import absolute_import, division, print_function, unicode_literals
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from msgpack import (
|
from msgpack import (
|
||||||
packb, unpackb, Packer, Unpacker, ExtType,
|
ExtType,
|
||||||
PackOverflowError, PackValueError, UnpackValueError,
|
Packer,
|
||||||
|
PackOverflowError,
|
||||||
|
PackValueError,
|
||||||
|
Unpacker,
|
||||||
|
UnpackValueError,
|
||||||
|
packb,
|
||||||
|
unpackb,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_integer():
|
def test_integer():
|
||||||
x = -(2 ** 63)
|
x = -(2**63)
|
||||||
assert unpackb(packb(x)) == x
|
assert unpackb(packb(x)) == x
|
||||||
with pytest.raises(PackOverflowError):
|
with pytest.raises(PackOverflowError):
|
||||||
packb(x-1)
|
packb(x - 1)
|
||||||
|
|
||||||
x = 2 ** 64 - 1
|
x = 2**64 - 1
|
||||||
assert unpackb(packb(x)) == x
|
assert unpackb(packb(x)) == x
|
||||||
with pytest.raises(PackOverflowError):
|
with pytest.raises(PackOverflowError):
|
||||||
packb(x+1)
|
packb(x + 1)
|
||||||
|
|
||||||
|
|
||||||
def test_array_header():
|
def test_array_header():
|
||||||
packer = Packer()
|
packer = Packer()
|
||||||
packer.pack_array_header(2**32-1)
|
packer.pack_array_header(2**32 - 1)
|
||||||
with pytest.raises(PackValueError):
|
with pytest.raises(PackValueError):
|
||||||
packer.pack_array_header(2**32)
|
packer.pack_array_header(2**32)
|
||||||
|
|
||||||
|
|
||||||
def test_map_header():
|
def test_map_header():
|
||||||
packer = Packer()
|
packer = Packer()
|
||||||
packer.pack_map_header(2**32-1)
|
packer.pack_map_header(2**32 - 1)
|
||||||
with pytest.raises(PackValueError):
|
with pytest.raises(PackValueError):
|
||||||
packer.pack_array_header(2**32)
|
packer.pack_array_header(2**32)
|
||||||
|
|
||||||
|
|
||||||
def test_max_str_len():
|
def test_max_str_len():
|
||||||
d = 'x' * 3
|
d = "x" * 3
|
||||||
packed = packb(d)
|
packed = packb(d)
|
||||||
|
|
||||||
unpacker = Unpacker(max_str_len=3, raw=False)
|
unpacker = Unpacker(max_str_len=3, raw=False)
|
||||||
|
|
@ -50,7 +54,7 @@ def test_max_str_len():
|
||||||
|
|
||||||
|
|
||||||
def test_max_bin_len():
|
def test_max_bin_len():
|
||||||
d = b'x' * 3
|
d = b"x" * 3
|
||||||
packed = packb(d, use_bin_type=True)
|
packed = packb(d, use_bin_type=True)
|
||||||
|
|
||||||
unpacker = Unpacker(max_bin_len=3)
|
unpacker = Unpacker(max_bin_len=3)
|
||||||
|
|
@ -64,7 +68,7 @@ def test_max_bin_len():
|
||||||
|
|
||||||
|
|
||||||
def test_max_array_len():
|
def test_max_array_len():
|
||||||
d = [1,2,3]
|
d = [1, 2, 3]
|
||||||
packed = packb(d)
|
packed = packb(d)
|
||||||
|
|
||||||
unpacker = Unpacker(max_array_len=3)
|
unpacker = Unpacker(max_array_len=3)
|
||||||
|
|
@ -81,11 +85,11 @@ def test_max_map_len():
|
||||||
d = {1: 2, 3: 4, 5: 6}
|
d = {1: 2, 3: 4, 5: 6}
|
||||||
packed = packb(d)
|
packed = packb(d)
|
||||||
|
|
||||||
unpacker = Unpacker(max_map_len=3)
|
unpacker = Unpacker(max_map_len=3, strict_map_key=False)
|
||||||
unpacker.feed(packed)
|
unpacker.feed(packed)
|
||||||
assert unpacker.unpack() == d
|
assert unpacker.unpack() == d
|
||||||
|
|
||||||
unpacker = Unpacker(max_map_len=2)
|
unpacker = Unpacker(max_map_len=2, strict_map_key=False)
|
||||||
with pytest.raises(UnpackValueError):
|
with pytest.raises(UnpackValueError):
|
||||||
unpacker.feed(packed)
|
unpacker.feed(packed)
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
|
|
@ -105,11 +109,10 @@ def test_max_ext_len():
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# PyPy fails following tests because of constant folding?
|
# PyPy fails following tests because of constant folding?
|
||||||
# https://bugs.pypy.org/issue1721
|
# https://bugs.pypy.org/issue1721
|
||||||
#@pytest.mark.skipif(True, reason="Requires very large memory.")
|
# @pytest.mark.skipif(True, reason="Requires very large memory.")
|
||||||
#def test_binary():
|
# def test_binary():
|
||||||
# x = b'x' * (2**32 - 1)
|
# x = b'x' * (2**32 - 1)
|
||||||
# assert unpackb(packb(x)) == x
|
# assert unpackb(packb(x)) == x
|
||||||
# del x
|
# del x
|
||||||
|
|
@ -118,8 +121,8 @@ def test_max_ext_len():
|
||||||
# packb(x)
|
# packb(x)
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#@pytest.mark.skipif(True, reason="Requires very large memory.")
|
# @pytest.mark.skipif(True, reason="Requires very large memory.")
|
||||||
#def test_string():
|
# def test_string():
|
||||||
# x = 'x' * (2**32 - 1)
|
# x = 'x' * (2**32 - 1)
|
||||||
# assert unpackb(packb(x)) == x
|
# assert unpackb(packb(x)) == x
|
||||||
# x += 'y'
|
# x += 'y'
|
||||||
|
|
@ -127,10 +130,36 @@ def test_max_ext_len():
|
||||||
# packb(x)
|
# packb(x)
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#@pytest.mark.skipif(True, reason="Requires very large memory.")
|
# @pytest.mark.skipif(True, reason="Requires very large memory.")
|
||||||
#def test_array():
|
# def test_array():
|
||||||
# x = [0] * (2**32 - 1)
|
# x = [0] * (2**32 - 1)
|
||||||
# assert unpackb(packb(x)) == x
|
# assert unpackb(packb(x)) == x
|
||||||
# x.append(0)
|
# x.append(0)
|
||||||
# with pytest.raises(ValueError):
|
# with pytest.raises(ValueError):
|
||||||
# packb(x)
|
# packb(x)
|
||||||
|
|
||||||
|
|
||||||
|
# auto max len
|
||||||
|
|
||||||
|
|
||||||
|
def test_auto_max_array_len():
|
||||||
|
packed = b"\xde\x00\x06zz"
|
||||||
|
with pytest.raises(UnpackValueError):
|
||||||
|
unpackb(packed, raw=False)
|
||||||
|
|
||||||
|
unpacker = Unpacker(max_buffer_size=5, raw=False)
|
||||||
|
unpacker.feed(packed)
|
||||||
|
with pytest.raises(UnpackValueError):
|
||||||
|
unpacker.unpack()
|
||||||
|
|
||||||
|
|
||||||
|
def test_auto_max_map_len():
|
||||||
|
# len(packed) == 6 -> max_map_len == 3
|
||||||
|
packed = b"\xde\x00\x04zzz"
|
||||||
|
with pytest.raises(UnpackValueError):
|
||||||
|
unpackb(packed, raw=False)
|
||||||
|
|
||||||
|
unpacker = Unpacker(max_buffer_size=6, raw=False)
|
||||||
|
unpacker.feed(packed)
|
||||||
|
with pytest.raises(UnpackValueError):
|
||||||
|
unpacker.unpack()
|
||||||
|
|
|
||||||
|
|
@ -1,47 +1,26 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from array import array
|
from array import array
|
||||||
|
|
||||||
from msgpack import packb, unpackb
|
from msgpack import packb, unpackb
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
# For Python < 3:
|
def make_array(f, data):
|
||||||
# - array type only supports old buffer interface
|
a = array(f)
|
||||||
# - array.frombytes is not available, must use deprecated array.fromstring
|
a.frombytes(data)
|
||||||
if sys.version_info[0] < 3:
|
return a
|
||||||
def make_memoryview(obj):
|
|
||||||
return memoryview(buffer(obj))
|
|
||||||
|
|
||||||
def make_array(f, data):
|
|
||||||
a = array(f)
|
|
||||||
a.fromstring(data)
|
|
||||||
return a
|
|
||||||
|
|
||||||
def get_data(a):
|
|
||||||
return a.tostring()
|
|
||||||
else:
|
|
||||||
make_memoryview = memoryview
|
|
||||||
|
|
||||||
def make_array(f, data):
|
|
||||||
a = array(f)
|
|
||||||
a.frombytes(data)
|
|
||||||
return a
|
|
||||||
|
|
||||||
def get_data(a):
|
|
||||||
return a.tobytes()
|
|
||||||
|
|
||||||
|
|
||||||
def _runtest(format, nbytes, expected_header, expected_prefix, use_bin_type):
|
def _runtest(format, nbytes, expected_header, expected_prefix, use_bin_type):
|
||||||
# create a new array
|
# create a new array
|
||||||
original_array = array(format)
|
original_array = array(format)
|
||||||
original_array.fromlist([255] * (nbytes // original_array.itemsize))
|
original_array.fromlist([255] * (nbytes // original_array.itemsize))
|
||||||
original_data = get_data(original_array)
|
original_data = original_array.tobytes()
|
||||||
view = make_memoryview(original_array)
|
view = memoryview(original_array)
|
||||||
|
|
||||||
# pack, unpack, and reconstruct array
|
# pack, unpack, and reconstruct array
|
||||||
packed = packb(view, use_bin_type=use_bin_type)
|
packed = packb(view, use_bin_type=use_bin_type)
|
||||||
unpacked = unpackb(packed)
|
unpacked = unpackb(packed, raw=(not use_bin_type))
|
||||||
reconstructed_array = make_array(format, unpacked)
|
reconstructed_array = make_array(format, unpacked)
|
||||||
|
|
||||||
# check that we got the right amount of data
|
# check that we got the right amount of data
|
||||||
|
|
@ -49,64 +28,72 @@ def _runtest(format, nbytes, expected_header, expected_prefix, use_bin_type):
|
||||||
# check packed header
|
# check packed header
|
||||||
assert packed[:1] == expected_header
|
assert packed[:1] == expected_header
|
||||||
# check packed length prefix, if any
|
# check packed length prefix, if any
|
||||||
assert packed[1:1+len(expected_prefix)] == expected_prefix
|
assert packed[1 : 1 + len(expected_prefix)] == expected_prefix
|
||||||
# check packed data
|
# check packed data
|
||||||
assert packed[1+len(expected_prefix):] == original_data
|
assert packed[1 + len(expected_prefix) :] == original_data
|
||||||
# check array unpacked correctly
|
# check array unpacked correctly
|
||||||
assert original_array == reconstructed_array
|
assert original_array == reconstructed_array
|
||||||
|
|
||||||
|
|
||||||
def test_fixstr_from_byte():
|
def test_fixstr_from_byte():
|
||||||
_runtest('B', 1, b'\xa1', b'', False)
|
_runtest("B", 1, b"\xa1", b"", False)
|
||||||
_runtest('B', 31, b'\xbf', b'', False)
|
_runtest("B", 31, b"\xbf", b"", False)
|
||||||
|
|
||||||
|
|
||||||
def test_fixstr_from_float():
|
def test_fixstr_from_float():
|
||||||
_runtest('f', 4, b'\xa4', b'', False)
|
_runtest("f", 4, b"\xa4", b"", False)
|
||||||
_runtest('f', 28, b'\xbc', b'', False)
|
_runtest("f", 28, b"\xbc", b"", False)
|
||||||
|
|
||||||
|
|
||||||
def test_str16_from_byte():
|
def test_str16_from_byte():
|
||||||
_runtest('B', 2**8, b'\xda', b'\x01\x00', False)
|
_runtest("B", 2**8, b"\xda", b"\x01\x00", False)
|
||||||
_runtest('B', 2**16-1, b'\xda', b'\xff\xff', False)
|
_runtest("B", 2**16 - 1, b"\xda", b"\xff\xff", False)
|
||||||
|
|
||||||
|
|
||||||
def test_str16_from_float():
|
def test_str16_from_float():
|
||||||
_runtest('f', 2**8, b'\xda', b'\x01\x00', False)
|
_runtest("f", 2**8, b"\xda", b"\x01\x00", False)
|
||||||
_runtest('f', 2**16-4, b'\xda', b'\xff\xfc', False)
|
_runtest("f", 2**16 - 4, b"\xda", b"\xff\xfc", False)
|
||||||
|
|
||||||
|
|
||||||
def test_str32_from_byte():
|
def test_str32_from_byte():
|
||||||
_runtest('B', 2**16, b'\xdb', b'\x00\x01\x00\x00', False)
|
_runtest("B", 2**16, b"\xdb", b"\x00\x01\x00\x00", False)
|
||||||
|
|
||||||
|
|
||||||
def test_str32_from_float():
|
def test_str32_from_float():
|
||||||
_runtest('f', 2**16, b'\xdb', b'\x00\x01\x00\x00', False)
|
_runtest("f", 2**16, b"\xdb", b"\x00\x01\x00\x00", False)
|
||||||
|
|
||||||
|
|
||||||
def test_bin8_from_byte():
|
def test_bin8_from_byte():
|
||||||
_runtest('B', 1, b'\xc4', b'\x01', True)
|
_runtest("B", 1, b"\xc4", b"\x01", True)
|
||||||
_runtest('B', 2**8-1, b'\xc4', b'\xff', True)
|
_runtest("B", 2**8 - 1, b"\xc4", b"\xff", True)
|
||||||
|
|
||||||
|
|
||||||
def test_bin8_from_float():
|
def test_bin8_from_float():
|
||||||
_runtest('f', 4, b'\xc4', b'\x04', True)
|
_runtest("f", 4, b"\xc4", b"\x04", True)
|
||||||
_runtest('f', 2**8-4, b'\xc4', b'\xfc', True)
|
_runtest("f", 2**8 - 4, b"\xc4", b"\xfc", True)
|
||||||
|
|
||||||
|
|
||||||
def test_bin16_from_byte():
|
def test_bin16_from_byte():
|
||||||
_runtest('B', 2**8, b'\xc5', b'\x01\x00', True)
|
_runtest("B", 2**8, b"\xc5", b"\x01\x00", True)
|
||||||
_runtest('B', 2**16-1, b'\xc5', b'\xff\xff', True)
|
_runtest("B", 2**16 - 1, b"\xc5", b"\xff\xff", True)
|
||||||
|
|
||||||
|
|
||||||
def test_bin16_from_float():
|
def test_bin16_from_float():
|
||||||
_runtest('f', 2**8, b'\xc5', b'\x01\x00', True)
|
_runtest("f", 2**8, b"\xc5", b"\x01\x00", True)
|
||||||
_runtest('f', 2**16-4, b'\xc5', b'\xff\xfc', True)
|
_runtest("f", 2**16 - 4, b"\xc5", b"\xff\xfc", True)
|
||||||
|
|
||||||
|
|
||||||
def test_bin32_from_byte():
|
def test_bin32_from_byte():
|
||||||
_runtest('B', 2**16, b'\xc6', b'\x00\x01\x00\x00', True)
|
_runtest("B", 2**16, b"\xc6", b"\x00\x01\x00\x00", True)
|
||||||
|
|
||||||
|
|
||||||
def test_bin32_from_float():
|
def test_bin32_from_float():
|
||||||
_runtest('f', 2**16, b'\xc6', b'\x00\x01\x00\x00', True)
|
_runtest("f", 2**16, b"\xc6", b"\x00\x01\x00\x00", True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_multidim_memoryview():
|
||||||
|
# See https://github.com/msgpack/msgpack-python/issues/526
|
||||||
|
view = memoryview(b"\00" * 6)
|
||||||
|
data = view.cast(view.format, (3, 2))
|
||||||
|
packed = packb(data)
|
||||||
|
assert packed == b"\xc4\x06\x00\x00\x00\x00\x00\x00"
|
||||||
|
|
|
||||||
|
|
@ -1,88 +1,90 @@
|
||||||
# coding: utf-8
|
from msgpack import ExtType, packb, unpackb
|
||||||
|
|
||||||
from msgpack import packb, unpackb, ExtType
|
|
||||||
|
|
||||||
|
|
||||||
def test_str8():
|
def test_str8():
|
||||||
header = b'\xd9'
|
header = b"\xd9"
|
||||||
data = b'x' * 32
|
data = b"x" * 32
|
||||||
b = packb(data.decode(), use_bin_type=True)
|
b = packb(data.decode(), use_bin_type=True)
|
||||||
assert len(b) == len(data) + 2
|
assert len(b) == len(data) + 2
|
||||||
assert b[0:2] == header + b'\x20'
|
assert b[0:2] == header + b"\x20"
|
||||||
assert b[2:] == data
|
assert b[2:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b, raw=True) == data
|
||||||
|
assert unpackb(b, raw=False) == data.decode()
|
||||||
|
|
||||||
data = b'x' * 255
|
data = b"x" * 255
|
||||||
b = packb(data.decode(), use_bin_type=True)
|
b = packb(data.decode(), use_bin_type=True)
|
||||||
assert len(b) == len(data) + 2
|
assert len(b) == len(data) + 2
|
||||||
assert b[0:2] == header + b'\xff'
|
assert b[0:2] == header + b"\xff"
|
||||||
assert b[2:] == data
|
assert b[2:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b, raw=True) == data
|
||||||
|
assert unpackb(b, raw=False) == data.decode()
|
||||||
|
|
||||||
|
|
||||||
def test_bin8():
|
def test_bin8():
|
||||||
header = b'\xc4'
|
header = b"\xc4"
|
||||||
data = b''
|
data = b""
|
||||||
b = packb(data, use_bin_type=True)
|
b = packb(data, use_bin_type=True)
|
||||||
assert len(b) == len(data) + 2
|
assert len(b) == len(data) + 2
|
||||||
assert b[0:2] == header + b'\x00'
|
assert b[0:2] == header + b"\x00"
|
||||||
assert b[2:] == data
|
assert b[2:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b) == data
|
||||||
|
|
||||||
data = b'x' * 255
|
data = b"x" * 255
|
||||||
b = packb(data, use_bin_type=True)
|
b = packb(data, use_bin_type=True)
|
||||||
assert len(b) == len(data) + 2
|
assert len(b) == len(data) + 2
|
||||||
assert b[0:2] == header + b'\xff'
|
assert b[0:2] == header + b"\xff"
|
||||||
assert b[2:] == data
|
assert b[2:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b) == data
|
||||||
|
|
||||||
|
|
||||||
def test_bin16():
|
def test_bin16():
|
||||||
header = b'\xc5'
|
header = b"\xc5"
|
||||||
data = b'x' * 256
|
data = b"x" * 256
|
||||||
b = packb(data, use_bin_type=True)
|
b = packb(data, use_bin_type=True)
|
||||||
assert len(b) == len(data) + 3
|
assert len(b) == len(data) + 3
|
||||||
assert b[0:1] == header
|
assert b[0:1] == header
|
||||||
assert b[1:3] == b'\x01\x00'
|
assert b[1:3] == b"\x01\x00"
|
||||||
assert b[3:] == data
|
assert b[3:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b) == data
|
||||||
|
|
||||||
data = b'x' * 65535
|
data = b"x" * 65535
|
||||||
b = packb(data, use_bin_type=True)
|
b = packb(data, use_bin_type=True)
|
||||||
assert len(b) == len(data) + 3
|
assert len(b) == len(data) + 3
|
||||||
assert b[0:1] == header
|
assert b[0:1] == header
|
||||||
assert b[1:3] == b'\xff\xff'
|
assert b[1:3] == b"\xff\xff"
|
||||||
assert b[3:] == data
|
assert b[3:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b) == data
|
||||||
|
|
||||||
|
|
||||||
def test_bin32():
|
def test_bin32():
|
||||||
header = b'\xc6'
|
header = b"\xc6"
|
||||||
data = b'x' * 65536
|
data = b"x" * 65536
|
||||||
b = packb(data, use_bin_type=True)
|
b = packb(data, use_bin_type=True)
|
||||||
assert len(b) == len(data) + 5
|
assert len(b) == len(data) + 5
|
||||||
assert b[0:1] == header
|
assert b[0:1] == header
|
||||||
assert b[1:5] == b'\x00\x01\x00\x00'
|
assert b[1:5] == b"\x00\x01\x00\x00"
|
||||||
assert b[5:] == data
|
assert b[5:] == data
|
||||||
assert unpackb(b) == data
|
assert unpackb(b) == data
|
||||||
|
|
||||||
|
|
||||||
def test_ext():
|
def test_ext():
|
||||||
def check(ext, packed):
|
def check(ext, packed):
|
||||||
assert packb(ext) == packed
|
assert packb(ext) == packed
|
||||||
assert unpackb(packed) == ext
|
assert unpackb(packed) == ext
|
||||||
check(ExtType(0x42, b'Z'), b'\xd4\x42Z') # fixext 1
|
|
||||||
check(ExtType(0x42, b'ZZ'), b'\xd5\x42ZZ') # fixext 2
|
check(ExtType(0x42, b"Z"), b"\xd4\x42Z") # fixext 1
|
||||||
check(ExtType(0x42, b'Z'*4), b'\xd6\x42' + b'Z'*4) # fixext 4
|
check(ExtType(0x42, b"ZZ"), b"\xd5\x42ZZ") # fixext 2
|
||||||
check(ExtType(0x42, b'Z'*8), b'\xd7\x42' + b'Z'*8) # fixext 8
|
check(ExtType(0x42, b"Z" * 4), b"\xd6\x42" + b"Z" * 4) # fixext 4
|
||||||
check(ExtType(0x42, b'Z'*16), b'\xd8\x42' + b'Z'*16) # fixext 16
|
check(ExtType(0x42, b"Z" * 8), b"\xd7\x42" + b"Z" * 8) # fixext 8
|
||||||
|
check(ExtType(0x42, b"Z" * 16), b"\xd8\x42" + b"Z" * 16) # fixext 16
|
||||||
# ext 8
|
# ext 8
|
||||||
check(ExtType(0x42, b''), b'\xc7\x00\x42')
|
check(ExtType(0x42, b""), b"\xc7\x00\x42")
|
||||||
check(ExtType(0x42, b'Z'*255), b'\xc7\xff\x42' + b'Z'*255)
|
check(ExtType(0x42, b"Z" * 255), b"\xc7\xff\x42" + b"Z" * 255)
|
||||||
# ext 16
|
# ext 16
|
||||||
check(ExtType(0x42, b'Z'*256), b'\xc8\x01\x00\x42' + b'Z'*256)
|
check(ExtType(0x42, b"Z" * 256), b"\xc8\x01\x00\x42" + b"Z" * 256)
|
||||||
check(ExtType(0x42, b'Z'*0xffff), b'\xc8\xff\xff\x42' + b'Z'*0xffff)
|
check(ExtType(0x42, b"Z" * 0xFFFF), b"\xc8\xff\xff\x42" + b"Z" * 0xFFFF)
|
||||||
# ext 32
|
# ext 32
|
||||||
check(ExtType(0x42, b'Z'*0x10000), b'\xc9\x00\x01\x00\x00\x42' + b'Z'*0x10000)
|
check(ExtType(0x42, b"Z" * 0x10000), b"\xc9\x00\x01\x00\x00\x42" + b"Z" * 0x10000)
|
||||||
# needs large memory
|
# needs large memory
|
||||||
#check(ExtType(0x42, b'Z'*0xffffffff),
|
# check(ExtType(0x42, b'Z'*0xffffffff),
|
||||||
# b'\xc9\xff\xff\xff\xff\x42' + b'Z'*0xffffffff)
|
# b'\xc9\xff\xff\xff\xff\x42' + b'Z'*0xffffffff)
|
||||||
|
|
|
||||||
|
|
@ -1,67 +1,82 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from pytest import raises
|
from pytest import raises
|
||||||
|
|
||||||
from msgpack import packb, unpackb
|
from msgpack import packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
def _decode_complex(obj):
|
def _decode_complex(obj):
|
||||||
if b'__complex__' in obj:
|
if b"__complex__" in obj:
|
||||||
return complex(obj[b'real'], obj[b'imag'])
|
return complex(obj[b"real"], obj[b"imag"])
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
|
||||||
def _encode_complex(obj):
|
def _encode_complex(obj):
|
||||||
if isinstance(obj, complex):
|
if isinstance(obj, complex):
|
||||||
return {b'__complex__': True, b'real': 1, b'imag': 2}
|
return {b"__complex__": True, b"real": 1, b"imag": 2}
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
|
||||||
def test_encode_hook():
|
def test_encode_hook():
|
||||||
packed = packb([3, 1+2j], default=_encode_complex)
|
packed = packb([3, 1 + 2j], default=_encode_complex)
|
||||||
unpacked = unpackb(packed, use_list=1)
|
unpacked = unpackb(packed, use_list=1)
|
||||||
assert unpacked[1] == {b'__complex__': True, b'real': 1, b'imag': 2}
|
assert unpacked[1] == {b"__complex__": True, b"real": 1, b"imag": 2}
|
||||||
|
|
||||||
|
|
||||||
def test_decode_hook():
|
def test_decode_hook():
|
||||||
packed = packb([3, {b'__complex__': True, b'real': 1, b'imag': 2}])
|
packed = packb([3, {b"__complex__": True, b"real": 1, b"imag": 2}])
|
||||||
unpacked = unpackb(packed, object_hook=_decode_complex, use_list=1)
|
unpacked = unpackb(packed, object_hook=_decode_complex, use_list=1)
|
||||||
assert unpacked[1] == 1+2j
|
assert unpacked[1] == 1 + 2j
|
||||||
|
|
||||||
|
|
||||||
def test_decode_pairs_hook():
|
def test_decode_pairs_hook():
|
||||||
packed = packb([3, {1: 2, 3: 4}])
|
packed = packb([3, {1: 2, 3: 4}])
|
||||||
prod_sum = 1 * 2 + 3 * 4
|
prod_sum = 1 * 2 + 3 * 4
|
||||||
unpacked = unpackb(packed, object_pairs_hook=lambda l: sum(k * v for k, v in l), use_list=1)
|
unpacked = unpackb(
|
||||||
|
packed,
|
||||||
|
object_pairs_hook=lambda lst: sum(k * v for k, v in lst),
|
||||||
|
use_list=1,
|
||||||
|
strict_map_key=False,
|
||||||
|
)
|
||||||
assert unpacked[1] == prod_sum
|
assert unpacked[1] == prod_sum
|
||||||
|
|
||||||
|
|
||||||
def test_only_one_obj_hook():
|
def test_only_one_obj_hook():
|
||||||
with raises(TypeError):
|
with raises(TypeError):
|
||||||
unpackb(b'', object_hook=lambda x: x, object_pairs_hook=lambda x: x)
|
unpackb(b"", object_hook=lambda x: x, object_pairs_hook=lambda x: x)
|
||||||
|
|
||||||
|
|
||||||
def test_bad_hook():
|
def test_bad_hook():
|
||||||
with raises(TypeError):
|
with raises(TypeError):
|
||||||
packed = packb([3, 1+2j], default=lambda o: o)
|
packed = packb([3, 1 + 2j], default=lambda o: o)
|
||||||
unpacked = unpackb(packed, use_list=1)
|
unpackb(packed, use_list=1)
|
||||||
|
|
||||||
|
|
||||||
def _arr_to_str(arr):
|
def _arr_to_str(arr):
|
||||||
return ''.join(str(c) for c in arr)
|
return "".join(str(c) for c in arr)
|
||||||
|
|
||||||
|
|
||||||
def test_array_hook():
|
def test_array_hook():
|
||||||
packed = packb([1,2,3])
|
packed = packb([1, 2, 3])
|
||||||
unpacked = unpackb(packed, list_hook=_arr_to_str, use_list=1)
|
unpacked = unpackb(packed, list_hook=_arr_to_str, use_list=1)
|
||||||
assert unpacked == '123'
|
assert unpacked == "123"
|
||||||
|
|
||||||
|
|
||||||
class DecodeError(Exception):
|
class DecodeError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def bad_complex_decoder(o):
|
def bad_complex_decoder(o):
|
||||||
raise DecodeError("Ooops!")
|
raise DecodeError("Ooops!")
|
||||||
|
|
||||||
|
|
||||||
def test_an_exception_in_objecthook1():
|
def test_an_exception_in_objecthook1():
|
||||||
with raises(DecodeError):
|
with raises(DecodeError):
|
||||||
packed = packb({1: {'__complex__': True, 'real': 1, 'imag': 2}})
|
packed = packb({1: {"__complex__": True, "real": 1, "imag": 2}})
|
||||||
unpackb(packed, object_hook=bad_complex_decoder)
|
unpackb(packed, object_hook=bad_complex_decoder, strict_map_key=False)
|
||||||
|
|
||||||
|
|
||||||
def test_an_exception_in_objecthook2():
|
def test_an_exception_in_objecthook2():
|
||||||
with raises(DecodeError):
|
with raises(DecodeError):
|
||||||
packed = packb({1: [{'__complex__': True, 'real': 1, 'imag': 2}]})
|
packed = packb({1: [{"__complex__": True, "real": 1, "imag": 2}]})
|
||||||
unpackb(packed, list_hook=bad_complex_decoder, use_list=1)
|
unpackb(packed, list_hook=bad_complex_decoder, use_list=1, strict_map_key=False)
|
||||||
|
|
|
||||||
|
|
@ -1,33 +1,58 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
from __future__ import absolute_import, division, print_function, unicode_literals
|
|
||||||
|
|
||||||
import struct
|
import struct
|
||||||
from pytest import raises, xfail
|
|
||||||
|
|
||||||
from msgpack import packb, unpackb, Unpacker, Packer
|
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from msgpack import Packer, Unpacker, packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
def check(data, use_list=False):
|
def check(data, use_list=False):
|
||||||
re = unpackb(packb(data), use_list=use_list)
|
re = unpackb(packb(data), use_list=use_list, strict_map_key=False)
|
||||||
assert re == data
|
assert re == data
|
||||||
|
|
||||||
|
|
||||||
def testPack():
|
def testPack():
|
||||||
test_data = [
|
test_data = [
|
||||||
0, 1, 127, 128, 255, 256, 65535, 65536, 4294967295, 4294967296,
|
0,
|
||||||
-1, -32, -33, -128, -129, -32768, -32769, -4294967296, -4294967297,
|
1,
|
||||||
1.0,
|
127,
|
||||||
b"", b"a", b"a"*31, b"a"*32,
|
128,
|
||||||
None, True, False,
|
255,
|
||||||
(), ((),), ((), None,),
|
256,
|
||||||
|
65535,
|
||||||
|
65536,
|
||||||
|
4294967295,
|
||||||
|
4294967296,
|
||||||
|
-1,
|
||||||
|
-32,
|
||||||
|
-33,
|
||||||
|
-128,
|
||||||
|
-129,
|
||||||
|
-32768,
|
||||||
|
-32769,
|
||||||
|
-4294967296,
|
||||||
|
-4294967297,
|
||||||
|
1.0,
|
||||||
|
b"",
|
||||||
|
b"a",
|
||||||
|
b"a" * 31,
|
||||||
|
b"a" * 32,
|
||||||
|
None,
|
||||||
|
True,
|
||||||
|
False,
|
||||||
|
(),
|
||||||
|
((),),
|
||||||
|
((), None),
|
||||||
{None: 0},
|
{None: 0},
|
||||||
(1<<23),
|
(1 << 23),
|
||||||
]
|
]
|
||||||
for td in test_data:
|
for td in test_data:
|
||||||
check(td)
|
check(td)
|
||||||
|
|
||||||
|
|
||||||
def testPackUnicode():
|
def testPackUnicode():
|
||||||
test_data = ["", "abcd", ["defgh"], "Русский текст"]
|
test_data = ["", "abcd", ["defgh"], "Русский текст"]
|
||||||
for td in test_data:
|
for td in test_data:
|
||||||
|
|
@ -38,57 +63,48 @@ def testPackUnicode():
|
||||||
re = Unpacker(BytesIO(data), raw=False, use_list=1).unpack()
|
re = Unpacker(BytesIO(data), raw=False, use_list=1).unpack()
|
||||||
assert re == td
|
assert re == td
|
||||||
|
|
||||||
def testPackUTF32(): # deprecated
|
|
||||||
try:
|
|
||||||
test_data = [
|
|
||||||
"",
|
|
||||||
"abcd",
|
|
||||||
["defgh"],
|
|
||||||
"Русский текст",
|
|
||||||
]
|
|
||||||
for td in test_data:
|
|
||||||
re = unpackb(packb(td, encoding='utf-32'), use_list=1, encoding='utf-32')
|
|
||||||
assert re == td
|
|
||||||
except LookupError as e:
|
|
||||||
xfail(e)
|
|
||||||
|
|
||||||
def testPackBytes():
|
def testPackBytes():
|
||||||
test_data = [
|
test_data = [b"", b"abcd", (b"defgh",)]
|
||||||
b"", b"abcd", (b"defgh",),
|
|
||||||
]
|
|
||||||
for td in test_data:
|
for td in test_data:
|
||||||
check(td)
|
check(td)
|
||||||
|
|
||||||
|
|
||||||
def testPackByteArrays():
|
def testPackByteArrays():
|
||||||
test_data = [
|
test_data = [bytearray(b""), bytearray(b"abcd"), (bytearray(b"defgh"),)]
|
||||||
bytearray(b""), bytearray(b"abcd"), (bytearray(b"defgh"),),
|
|
||||||
]
|
|
||||||
for td in test_data:
|
for td in test_data:
|
||||||
check(td)
|
check(td)
|
||||||
|
|
||||||
def testIgnoreUnicodeErrors(): # deprecated
|
|
||||||
re = unpackb(packb(b'abc\xeddef'), encoding='utf-8', unicode_errors='ignore', use_list=1)
|
def testIgnoreUnicodeErrors():
|
||||||
|
re = unpackb(packb(b"abc\xeddef", use_bin_type=False), raw=False, unicode_errors="ignore")
|
||||||
assert re == "abcdef"
|
assert re == "abcdef"
|
||||||
|
|
||||||
|
|
||||||
def testStrictUnicodeUnpack():
|
def testStrictUnicodeUnpack():
|
||||||
with raises(UnicodeDecodeError):
|
packed = packb(b"abc\xeddef", use_bin_type=False)
|
||||||
unpackb(packb(b'abc\xeddef'), raw=False, use_list=1)
|
with pytest.raises(UnicodeDecodeError):
|
||||||
|
unpackb(packed, raw=False, use_list=1)
|
||||||
|
|
||||||
def testStrictUnicodePack(): # deprecated
|
|
||||||
with raises(UnicodeEncodeError):
|
|
||||||
packb("abc\xeddef", encoding='ascii', unicode_errors='strict')
|
|
||||||
|
|
||||||
def testIgnoreErrorsPack(): # deprecated
|
def testIgnoreErrorsPack():
|
||||||
re = unpackb(packb("abcФФФdef", encoding='ascii', unicode_errors='ignore'), raw=False, use_list=1)
|
re = unpackb(
|
||||||
|
packb("abc\udc80\udcffdef", use_bin_type=True, unicode_errors="ignore"),
|
||||||
|
raw=False,
|
||||||
|
use_list=1,
|
||||||
|
)
|
||||||
assert re == "abcdef"
|
assert re == "abcdef"
|
||||||
|
|
||||||
|
|
||||||
def testDecodeBinary():
|
def testDecodeBinary():
|
||||||
re = unpackb(packb(b"abc"), encoding=None, use_list=1)
|
re = unpackb(packb(b"abc"), use_list=1)
|
||||||
assert re == b"abc"
|
assert re == b"abc"
|
||||||
|
|
||||||
|
|
||||||
def testPackFloat():
|
def testPackFloat():
|
||||||
assert packb(1.0, use_single_float=True) == b'\xca' + struct.pack(str('>f'), 1.0)
|
assert packb(1.0, use_single_float=True) == b"\xca" + struct.pack(">f", 1.0)
|
||||||
assert packb(1.0, use_single_float=False) == b'\xcb' + struct.pack(str('>d'), 1.0)
|
assert packb(1.0, use_single_float=False) == b"\xcb" + struct.pack(">d", 1.0)
|
||||||
|
|
||||||
|
|
||||||
def testArraySize(sizes=[0, 5, 50, 1000]):
|
def testArraySize(sizes=[0, 5, 50, 1000]):
|
||||||
bio = BytesIO()
|
bio = BytesIO()
|
||||||
|
|
@ -103,6 +119,7 @@ def testArraySize(sizes=[0, 5, 50, 1000]):
|
||||||
for size in sizes:
|
for size in sizes:
|
||||||
assert unpacker.unpack() == list(range(size))
|
assert unpacker.unpack() == list(range(size))
|
||||||
|
|
||||||
|
|
||||||
def test_manualreset(sizes=[0, 5, 50, 1000]):
|
def test_manualreset(sizes=[0, 5, 50, 1000]):
|
||||||
packer = Packer(autoreset=False)
|
packer = Packer(autoreset=False)
|
||||||
for size in sizes:
|
for size in sizes:
|
||||||
|
|
@ -116,7 +133,8 @@ def test_manualreset(sizes=[0, 5, 50, 1000]):
|
||||||
assert unpacker.unpack() == list(range(size))
|
assert unpacker.unpack() == list(range(size))
|
||||||
|
|
||||||
packer.reset()
|
packer.reset()
|
||||||
assert packer.bytes() == b''
|
assert packer.bytes() == b""
|
||||||
|
|
||||||
|
|
||||||
def testMapSize(sizes=[0, 5, 50, 1000]):
|
def testMapSize(sizes=[0, 5, 50, 1000]):
|
||||||
bio = BytesIO()
|
bio = BytesIO()
|
||||||
|
|
@ -124,27 +142,40 @@ def testMapSize(sizes=[0, 5, 50, 1000]):
|
||||||
for size in sizes:
|
for size in sizes:
|
||||||
bio.write(packer.pack_map_header(size))
|
bio.write(packer.pack_map_header(size))
|
||||||
for i in range(size):
|
for i in range(size):
|
||||||
bio.write(packer.pack(i)) # key
|
bio.write(packer.pack(i)) # key
|
||||||
bio.write(packer.pack(i * 2)) # value
|
bio.write(packer.pack(i * 2)) # value
|
||||||
|
|
||||||
bio.seek(0)
|
bio.seek(0)
|
||||||
unpacker = Unpacker(bio)
|
unpacker = Unpacker(bio, strict_map_key=False)
|
||||||
for size in sizes:
|
for size in sizes:
|
||||||
assert unpacker.unpack() == dict((i, i * 2) for i in range(size))
|
assert unpacker.unpack() == {i: i * 2 for i in range(size)}
|
||||||
|
|
||||||
|
|
||||||
def test_odict():
|
def test_odict():
|
||||||
seq = [(b'one', 1), (b'two', 2), (b'three', 3), (b'four', 4)]
|
seq = [(b"one", 1), (b"two", 2), (b"three", 3), (b"four", 4)]
|
||||||
od = OrderedDict(seq)
|
od = OrderedDict(seq)
|
||||||
assert unpackb(packb(od), use_list=1) == dict(seq)
|
assert unpackb(packb(od), use_list=1) == dict(seq)
|
||||||
|
|
||||||
def pair_hook(seq):
|
def pair_hook(seq):
|
||||||
return list(seq)
|
return list(seq)
|
||||||
|
|
||||||
assert unpackb(packb(od), object_pairs_hook=pair_hook, use_list=1) == seq
|
assert unpackb(packb(od), object_pairs_hook=pair_hook, use_list=1) == seq
|
||||||
|
|
||||||
|
|
||||||
def test_pairlist():
|
def test_pairlist():
|
||||||
pairlist = [(b'a', 1), (2, b'b'), (b'foo', b'bar')]
|
pairlist = [(b"a", 1), (2, b"b"), (b"foo", b"bar")]
|
||||||
packer = Packer()
|
packer = Packer()
|
||||||
packed = packer.pack_map_pairs(pairlist)
|
packed = packer.pack_map_pairs(pairlist)
|
||||||
unpacked = unpackb(packed, object_pairs_hook=list)
|
unpacked = unpackb(packed, object_pairs_hook=list, strict_map_key=False)
|
||||||
assert pairlist == unpacked
|
assert pairlist == unpacked
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_buffer():
|
||||||
|
packer = Packer(autoreset=0, use_bin_type=True)
|
||||||
|
packer.pack([1, 2])
|
||||||
|
strm = BytesIO()
|
||||||
|
strm.write(packer.getbuffer())
|
||||||
|
written = strm.getvalue()
|
||||||
|
|
||||||
|
expected = packb([1, 2], use_bin_type=True)
|
||||||
|
assert written == expected
|
||||||
|
|
|
||||||
|
|
@ -1,66 +1,72 @@
|
||||||
"""Test Unpacker's read_array_header and read_map_header methods"""
|
"""Test Unpacker's read_array_header and read_map_header methods"""
|
||||||
from msgpack import packb, Unpacker, OutOfData
|
|
||||||
|
from msgpack import OutOfData, Unpacker, packb
|
||||||
|
|
||||||
UnexpectedTypeException = ValueError
|
UnexpectedTypeException = ValueError
|
||||||
|
|
||||||
|
|
||||||
def test_read_array_header():
|
def test_read_array_header():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb(['a', 'b', 'c']))
|
unpacker.feed(packb(["a", "b", "c"]))
|
||||||
assert unpacker.read_array_header() == 3
|
assert unpacker.read_array_header() == 3
|
||||||
assert unpacker.unpack() == b'a'
|
assert unpacker.unpack() == "a"
|
||||||
assert unpacker.unpack() == b'b'
|
assert unpacker.unpack() == "b"
|
||||||
assert unpacker.unpack() == b'c'
|
assert unpacker.unpack() == "c"
|
||||||
try:
|
try:
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except OutOfData:
|
except OutOfData:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
||||||
def test_read_map_header():
|
def test_read_map_header():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb({'a': 'A'}))
|
unpacker.feed(packb({"a": "A"}))
|
||||||
assert unpacker.read_map_header() == 1
|
assert unpacker.read_map_header() == 1
|
||||||
assert unpacker.unpack() == B'a'
|
assert unpacker.unpack() == "a"
|
||||||
assert unpacker.unpack() == B'A'
|
assert unpacker.unpack() == "A"
|
||||||
try:
|
try:
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except OutOfData:
|
except OutOfData:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
||||||
def test_incorrect_type_array():
|
def test_incorrect_type_array():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb(1))
|
unpacker.feed(packb(1))
|
||||||
try:
|
try:
|
||||||
unpacker.read_array_header()
|
unpacker.read_array_header()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except UnexpectedTypeException:
|
except UnexpectedTypeException:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
||||||
def test_incorrect_type_map():
|
def test_incorrect_type_map():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb(1))
|
unpacker.feed(packb(1))
|
||||||
try:
|
try:
|
||||||
unpacker.read_map_header()
|
unpacker.read_map_header()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except UnexpectedTypeException:
|
except UnexpectedTypeException:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
||||||
def test_correct_type_nested_array():
|
def test_correct_type_nested_array():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb({'a': ['b', 'c', 'd']}))
|
unpacker.feed(packb({"a": ["b", "c", "d"]}))
|
||||||
try:
|
try:
|
||||||
unpacker.read_array_header()
|
unpacker.read_array_header()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except UnexpectedTypeException:
|
except UnexpectedTypeException:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
||||||
def test_incorrect_type_nested_map():
|
def test_incorrect_type_nested_map():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(packb([{'a': 'b'}]))
|
unpacker.feed(packb([{"a": "b"}]))
|
||||||
try:
|
try:
|
||||||
unpacker.read_map_header()
|
unpacker.read_map_header()
|
||||||
assert 0, 'should raise exception'
|
assert 0, "should raise exception"
|
||||||
except UnexpectedTypeException:
|
except UnexpectedTypeException:
|
||||||
assert 1, 'okay'
|
assert 1, "okay"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,14 @@
|
||||||
#!/usr/bin/env python
|
# ruff: noqa: E501
|
||||||
# coding: utf-8
|
# ignore line length limit for long comments
|
||||||
|
|
||||||
import io
|
import io
|
||||||
import msgpack
|
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
|
||||||
binarydata = bytes(bytearray(range(256)))
|
binarydata = bytes(bytearray(range(256)))
|
||||||
|
|
||||||
|
|
||||||
def gen_binary_data(idx):
|
def gen_binary_data(idx):
|
||||||
return binarydata[:idx % 300]
|
return binarydata[: idx % 300]
|
||||||
|
|
||||||
|
|
||||||
def test_exceeding_unpacker_read_size():
|
def test_exceeding_unpacker_read_size():
|
||||||
|
|
@ -18,10 +18,10 @@ def test_exceeding_unpacker_read_size():
|
||||||
|
|
||||||
NUMBER_OF_STRINGS = 6
|
NUMBER_OF_STRINGS = 6
|
||||||
read_size = 16
|
read_size = 16
|
||||||
# 5 ok for read_size=16, while 6 glibc detected *** python: double free or corruption (fasttop):
|
# 5 ok for read_size=16, while 6 glibc detected *** python: double free or corruption (fasttop):
|
||||||
# 20 ok for read_size=256, while 25 segfaults / glibc detected *** python: double free or corruption (!prev)
|
# 20 ok for read_size=256, while 25 segfaults / glibc detected *** python: double free or corruption (!prev)
|
||||||
# 40 ok for read_size=1024, while 50 introduces errors
|
# 40 ok for read_size=1024, while 50 introduces errors
|
||||||
# 7000 ok for read_size=1024*1024, while 8000 leads to glibc detected *** python: double free or corruption (!prev):
|
# 7000 ok for read_size=1024*1024, while 8000 leads to glibc detected *** python: double free or corruption (!prev):
|
||||||
|
|
||||||
for idx in range(NUMBER_OF_STRINGS):
|
for idx in range(NUMBER_OF_STRINGS):
|
||||||
data = gen_binary_data(idx)
|
data = gen_binary_data(idx)
|
||||||
|
|
@ -34,7 +34,7 @@ def test_exceeding_unpacker_read_size():
|
||||||
|
|
||||||
read_count = 0
|
read_count = 0
|
||||||
for idx, o in enumerate(unpacker):
|
for idx, o in enumerate(unpacker):
|
||||||
assert type(o) == bytes
|
assert isinstance(o, bytes)
|
||||||
assert o == gen_binary_data(idx)
|
assert o == gen_binary_data(idx)
|
||||||
read_count += 1
|
read_count += 1
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,117 +1,147 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
import io
|
import io
|
||||||
from msgpack import Unpacker, BufferFull
|
|
||||||
from msgpack import pack
|
|
||||||
from msgpack.exceptions import OutOfData
|
|
||||||
from pytest import raises
|
from pytest import raises
|
||||||
|
|
||||||
|
from msgpack import BufferFull, Unpacker, pack, packb
|
||||||
|
from msgpack.exceptions import OutOfData
|
||||||
|
|
||||||
|
|
||||||
def test_partialdata():
|
def test_partialdata():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(b'\xa5')
|
unpacker.feed(b"\xa5")
|
||||||
with raises(StopIteration): next(iter(unpacker))
|
with raises(StopIteration):
|
||||||
unpacker.feed(b'h')
|
next(iter(unpacker))
|
||||||
with raises(StopIteration): next(iter(unpacker))
|
unpacker.feed(b"h")
|
||||||
unpacker.feed(b'a')
|
with raises(StopIteration):
|
||||||
with raises(StopIteration): next(iter(unpacker))
|
next(iter(unpacker))
|
||||||
unpacker.feed(b'l')
|
unpacker.feed(b"a")
|
||||||
with raises(StopIteration): next(iter(unpacker))
|
with raises(StopIteration):
|
||||||
unpacker.feed(b'l')
|
next(iter(unpacker))
|
||||||
with raises(StopIteration): next(iter(unpacker))
|
unpacker.feed(b"l")
|
||||||
unpacker.feed(b'o')
|
with raises(StopIteration):
|
||||||
assert next(iter(unpacker)) == b'hallo'
|
next(iter(unpacker))
|
||||||
|
unpacker.feed(b"l")
|
||||||
|
with raises(StopIteration):
|
||||||
|
next(iter(unpacker))
|
||||||
|
unpacker.feed(b"o")
|
||||||
|
assert next(iter(unpacker)) == "hallo"
|
||||||
|
|
||||||
|
|
||||||
def test_foobar():
|
def test_foobar():
|
||||||
unpacker = Unpacker(read_size=3, use_list=1)
|
unpacker = Unpacker(read_size=3, use_list=1)
|
||||||
unpacker.feed(b'foobar')
|
unpacker.feed(b"foobar")
|
||||||
assert unpacker.unpack() == ord(b'f')
|
assert unpacker.unpack() == ord(b"f")
|
||||||
assert unpacker.unpack() == ord(b'o')
|
assert unpacker.unpack() == ord(b"o")
|
||||||
assert unpacker.unpack() == ord(b'o')
|
assert unpacker.unpack() == ord(b"o")
|
||||||
assert unpacker.unpack() == ord(b'b')
|
assert unpacker.unpack() == ord(b"b")
|
||||||
assert unpacker.unpack() == ord(b'a')
|
assert unpacker.unpack() == ord(b"a")
|
||||||
assert unpacker.unpack() == ord(b'r')
|
assert unpacker.unpack() == ord(b"r")
|
||||||
with raises(OutOfData):
|
with raises(OutOfData):
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
|
|
||||||
unpacker.feed(b'foo')
|
unpacker.feed(b"foo")
|
||||||
unpacker.feed(b'bar')
|
unpacker.feed(b"bar")
|
||||||
|
|
||||||
k = 0
|
k = 0
|
||||||
for o, e in zip(unpacker, 'foobarbaz'):
|
for o, e in zip(unpacker, "foobarbaz"):
|
||||||
assert o == ord(e)
|
assert o == ord(e)
|
||||||
k += 1
|
k += 1
|
||||||
assert k == len(b'foobar')
|
assert k == len(b"foobar")
|
||||||
|
|
||||||
|
|
||||||
def test_foobar_skip():
|
def test_foobar_skip():
|
||||||
unpacker = Unpacker(read_size=3, use_list=1)
|
unpacker = Unpacker(read_size=3, use_list=1)
|
||||||
unpacker.feed(b'foobar')
|
unpacker.feed(b"foobar")
|
||||||
assert unpacker.unpack() == ord(b'f')
|
assert unpacker.unpack() == ord(b"f")
|
||||||
unpacker.skip()
|
unpacker.skip()
|
||||||
assert unpacker.unpack() == ord(b'o')
|
assert unpacker.unpack() == ord(b"o")
|
||||||
unpacker.skip()
|
unpacker.skip()
|
||||||
assert unpacker.unpack() == ord(b'a')
|
assert unpacker.unpack() == ord(b"a")
|
||||||
unpacker.skip()
|
unpacker.skip()
|
||||||
with raises(OutOfData):
|
with raises(OutOfData):
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
|
|
||||||
|
|
||||||
def test_maxbuffersize():
|
def test_maxbuffersize():
|
||||||
with raises(ValueError):
|
with raises(ValueError):
|
||||||
Unpacker(read_size=5, max_buffer_size=3)
|
Unpacker(read_size=5, max_buffer_size=3)
|
||||||
unpacker = Unpacker(read_size=3, max_buffer_size=3, use_list=1)
|
unpacker = Unpacker(read_size=3, max_buffer_size=3, use_list=1)
|
||||||
unpacker.feed(b'fo')
|
unpacker.feed(b"fo")
|
||||||
with raises(BufferFull):
|
with raises(BufferFull):
|
||||||
unpacker.feed(b'ob')
|
unpacker.feed(b"ob")
|
||||||
unpacker.feed(b'o')
|
unpacker.feed(b"o")
|
||||||
assert ord('f') == next(unpacker)
|
assert ord("f") == next(unpacker)
|
||||||
unpacker.feed(b'b')
|
unpacker.feed(b"b")
|
||||||
assert ord('o') == next(unpacker)
|
assert ord("o") == next(unpacker)
|
||||||
assert ord('o') == next(unpacker)
|
assert ord("o") == next(unpacker)
|
||||||
assert ord('b') == next(unpacker)
|
assert ord("b") == next(unpacker)
|
||||||
|
|
||||||
|
|
||||||
|
def test_maxbuffersize_file():
|
||||||
|
buff = io.BytesIO(packb(b"a" * 10) + packb([b"a" * 20] * 2))
|
||||||
|
unpacker = Unpacker(buff, read_size=1, max_buffer_size=19, max_bin_len=20)
|
||||||
|
assert unpacker.unpack() == b"a" * 10
|
||||||
|
# assert unpacker.unpack() == [b"a" * 20]*2
|
||||||
|
with raises(BufferFull):
|
||||||
|
print(unpacker.unpack())
|
||||||
|
|
||||||
|
|
||||||
def test_readbytes():
|
def test_readbytes():
|
||||||
unpacker = Unpacker(read_size=3)
|
unpacker = Unpacker(read_size=3)
|
||||||
unpacker.feed(b'foobar')
|
unpacker.feed(b"foobar")
|
||||||
assert unpacker.unpack() == ord(b'f')
|
assert unpacker.unpack() == ord(b"f")
|
||||||
assert unpacker.read_bytes(3) == b'oob'
|
assert unpacker.read_bytes(3) == b"oob"
|
||||||
assert unpacker.unpack() == ord(b'a')
|
assert unpacker.unpack() == ord(b"a")
|
||||||
assert unpacker.unpack() == ord(b'r')
|
assert unpacker.unpack() == ord(b"r")
|
||||||
|
|
||||||
# Test buffer refill
|
# Test buffer refill
|
||||||
unpacker = Unpacker(io.BytesIO(b'foobar'), read_size=3)
|
unpacker = Unpacker(io.BytesIO(b"foobar"), read_size=3)
|
||||||
assert unpacker.unpack() == ord(b'f')
|
assert unpacker.unpack() == ord(b"f")
|
||||||
assert unpacker.read_bytes(3) == b'oob'
|
assert unpacker.read_bytes(3) == b"oob"
|
||||||
assert unpacker.unpack() == ord(b'a')
|
assert unpacker.unpack() == ord(b"a")
|
||||||
assert unpacker.unpack() == ord(b'r')
|
assert unpacker.unpack() == ord(b"r")
|
||||||
|
|
||||||
|
# Issue 352
|
||||||
|
u = Unpacker()
|
||||||
|
u.feed(b"x")
|
||||||
|
assert bytes(u.read_bytes(1)) == b"x"
|
||||||
|
with raises(StopIteration):
|
||||||
|
next(u)
|
||||||
|
u.feed(b"\1")
|
||||||
|
assert next(u) == 1
|
||||||
|
|
||||||
|
|
||||||
def test_issue124():
|
def test_issue124():
|
||||||
unpacker = Unpacker()
|
unpacker = Unpacker()
|
||||||
unpacker.feed(b'\xa1?\xa1!')
|
unpacker.feed(b"\xa1?\xa1!")
|
||||||
assert tuple(unpacker) == (b'?', b'!')
|
assert tuple(unpacker) == ("?", "!")
|
||||||
assert tuple(unpacker) == ()
|
assert tuple(unpacker) == ()
|
||||||
unpacker.feed(b"\xa1?\xa1")
|
unpacker.feed(b"\xa1?\xa1")
|
||||||
assert tuple(unpacker) == (b'?',)
|
assert tuple(unpacker) == ("?",)
|
||||||
assert tuple(unpacker) == ()
|
assert tuple(unpacker) == ()
|
||||||
unpacker.feed(b"!")
|
unpacker.feed(b"!")
|
||||||
assert tuple(unpacker) == (b'!',)
|
assert tuple(unpacker) == ("!",)
|
||||||
assert tuple(unpacker) == ()
|
assert tuple(unpacker) == ()
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_tell():
|
def test_unpack_tell():
|
||||||
stream = io.BytesIO()
|
stream = io.BytesIO()
|
||||||
messages = [2**i-1 for i in range(65)]
|
messages = [2**i - 1 for i in range(65)]
|
||||||
messages += [-(2**i) for i in range(1, 64)]
|
messages += [-(2**i) for i in range(1, 64)]
|
||||||
messages += [b'hello', b'hello'*1000, list(range(20)),
|
messages += [
|
||||||
{i: bytes(i)*i for i in range(10)},
|
b"hello",
|
||||||
{i: bytes(i)*i for i in range(32)}]
|
b"hello" * 1000,
|
||||||
|
list(range(20)),
|
||||||
|
{i: bytes(i) * i for i in range(10)},
|
||||||
|
{i: bytes(i) * i for i in range(32)},
|
||||||
|
]
|
||||||
offsets = []
|
offsets = []
|
||||||
for m in messages:
|
for m in messages:
|
||||||
pack(m, stream)
|
pack(m, stream)
|
||||||
offsets.append(stream.tell())
|
offsets.append(stream.tell())
|
||||||
stream.seek(0)
|
stream.seek(0)
|
||||||
unpacker = Unpacker(stream)
|
unpacker = Unpacker(stream, strict_map_key=False)
|
||||||
for m, o in zip(messages, offsets):
|
for m, o in zip(messages, offsets):
|
||||||
m2 = next(unpacker)
|
m2 = next(unpacker)
|
||||||
assert m == m2
|
assert m == m2
|
||||||
|
|
|
||||||
|
|
@ -1,34 +1,32 @@
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from msgpack import packb, unpackb, ExtType
|
|
||||||
|
from msgpack import ExtType, packb, unpackb
|
||||||
|
|
||||||
|
|
||||||
def test_namedtuple():
|
def test_namedtuple():
|
||||||
T = namedtuple('T', "foo bar")
|
T = namedtuple("T", "foo bar")
|
||||||
|
|
||||||
def default(o):
|
def default(o):
|
||||||
if isinstance(o, T):
|
if isinstance(o, T):
|
||||||
return dict(o._asdict())
|
return dict(o._asdict())
|
||||||
raise TypeError('Unsupported type %s' % (type(o),))
|
raise TypeError(f"Unsupported type {type(o)}")
|
||||||
|
|
||||||
packed = packb(T(1, 42), strict_types=True, use_bin_type=True, default=default)
|
packed = packb(T(1, 42), strict_types=True, use_bin_type=True, default=default)
|
||||||
unpacked = unpackb(packed, raw=False)
|
unpacked = unpackb(packed, raw=False)
|
||||||
assert unpacked == {'foo': 1, 'bar': 42}
|
assert unpacked == {"foo": 1, "bar": 42}
|
||||||
|
|
||||||
|
|
||||||
def test_tuple():
|
def test_tuple():
|
||||||
t = ('one', 2, b'three', (4, ))
|
t = ("one", 2, b"three", (4,))
|
||||||
|
|
||||||
def default(o):
|
def default(o):
|
||||||
if isinstance(o, tuple):
|
if isinstance(o, tuple):
|
||||||
return {
|
return {"__type__": "tuple", "value": list(o)}
|
||||||
'__type__': 'tuple',
|
raise TypeError(f"Unsupported type {type(o)}")
|
||||||
'value': list(o),
|
|
||||||
}
|
|
||||||
raise TypeError('Unsupported type %s' % (type(o),))
|
|
||||||
|
|
||||||
def convert(o):
|
def convert(o):
|
||||||
if o.get('__type__') == 'tuple':
|
if o.get("__type__") == "tuple":
|
||||||
return tuple(o['value'])
|
return tuple(o["value"])
|
||||||
return o
|
return o
|
||||||
|
|
||||||
data = packb(t, strict_types=True, use_bin_type=True, default=default)
|
data = packb(t, strict_types=True, use_bin_type=True, default=default)
|
||||||
|
|
@ -38,15 +36,14 @@ def test_tuple():
|
||||||
|
|
||||||
|
|
||||||
def test_tuple_ext():
|
def test_tuple_ext():
|
||||||
t = ('one', 2, b'three', (4, ))
|
t = ("one", 2, b"three", (4,))
|
||||||
|
|
||||||
MSGPACK_EXT_TYPE_TUPLE = 0
|
MSGPACK_EXT_TYPE_TUPLE = 0
|
||||||
|
|
||||||
def default(o):
|
def default(o):
|
||||||
if isinstance(o, tuple):
|
if isinstance(o, tuple):
|
||||||
# Convert to list and pack
|
# Convert to list and pack
|
||||||
payload = packb(
|
payload = packb(list(o), strict_types=True, use_bin_type=True, default=default)
|
||||||
list(o), strict_types=True, use_bin_type=True, default=default)
|
|
||||||
return ExtType(MSGPACK_EXT_TYPE_TUPLE, payload)
|
return ExtType(MSGPACK_EXT_TYPE_TUPLE, payload)
|
||||||
raise TypeError(repr(o))
|
raise TypeError(repr(o))
|
||||||
|
|
||||||
|
|
@ -54,7 +51,7 @@ def test_tuple_ext():
|
||||||
if code == MSGPACK_EXT_TYPE_TUPLE:
|
if code == MSGPACK_EXT_TYPE_TUPLE:
|
||||||
# Unpack and convert to tuple
|
# Unpack and convert to tuple
|
||||||
return tuple(unpackb(payload, raw=False, ext_hook=convert))
|
return tuple(unpackb(payload, raw=False, ext_hook=convert))
|
||||||
raise ValueError('Unknown Ext code {}'.format(code))
|
raise ValueError(f"Unknown Ext code {code}")
|
||||||
|
|
||||||
data = packb(t, strict_types=True, use_bin_type=True, default=default)
|
data = packb(t, strict_types=True, use_bin_type=True, default=default)
|
||||||
expected = unpackb(data, raw=False, ext_hook=convert)
|
expected = unpackb(data, raw=False, ext_hook=convert)
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,24 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
|
||||||
|
|
||||||
from msgpack import packb, unpackb
|
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
|
||||||
|
from msgpack import packb
|
||||||
|
|
||||||
|
|
||||||
class MyList(list):
|
class MyList(list):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class MyDict(dict):
|
class MyDict(dict):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class MyTuple(tuple):
|
class MyTuple(tuple):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
MyNamedTuple = namedtuple('MyNamedTuple', 'x y')
|
|
||||||
|
MyNamedTuple = namedtuple("MyNamedTuple", "x y")
|
||||||
|
|
||||||
|
|
||||||
def test_types():
|
def test_types():
|
||||||
assert packb(MyDict()) == packb(dict())
|
assert packb(MyDict()) == packb(dict())
|
||||||
|
|
|
||||||
171
test/test_timestamp.py
Normal file
171
test/test_timestamp.py
Normal file
|
|
@ -0,0 +1,171 @@
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
import msgpack
|
||||||
|
from msgpack.ext import Timestamp
|
||||||
|
|
||||||
|
|
||||||
|
def test_timestamp():
|
||||||
|
# timestamp32
|
||||||
|
ts = Timestamp(2**32 - 1)
|
||||||
|
assert ts.to_bytes() == b"\xff\xff\xff\xff"
|
||||||
|
packed = msgpack.packb(ts)
|
||||||
|
assert packed == b"\xd6\xff" + ts.to_bytes()
|
||||||
|
unpacked = msgpack.unpackb(packed)
|
||||||
|
assert ts == unpacked
|
||||||
|
assert ts.seconds == 2**32 - 1 and ts.nanoseconds == 0
|
||||||
|
|
||||||
|
# timestamp64
|
||||||
|
ts = Timestamp(2**34 - 1, 999999999)
|
||||||
|
assert ts.to_bytes() == b"\xee\x6b\x27\xff\xff\xff\xff\xff"
|
||||||
|
packed = msgpack.packb(ts)
|
||||||
|
assert packed == b"\xd7\xff" + ts.to_bytes()
|
||||||
|
unpacked = msgpack.unpackb(packed)
|
||||||
|
assert ts == unpacked
|
||||||
|
assert ts.seconds == 2**34 - 1 and ts.nanoseconds == 999999999
|
||||||
|
|
||||||
|
# timestamp96
|
||||||
|
ts = Timestamp(2**63 - 1, 999999999)
|
||||||
|
assert ts.to_bytes() == b"\x3b\x9a\xc9\xff\x7f\xff\xff\xff\xff\xff\xff\xff"
|
||||||
|
packed = msgpack.packb(ts)
|
||||||
|
assert packed == b"\xc7\x0c\xff" + ts.to_bytes()
|
||||||
|
unpacked = msgpack.unpackb(packed)
|
||||||
|
assert ts == unpacked
|
||||||
|
assert ts.seconds == 2**63 - 1 and ts.nanoseconds == 999999999
|
||||||
|
|
||||||
|
# negative fractional
|
||||||
|
ts = Timestamp.from_unix(-2.3) # s: -3, ns: 700000000
|
||||||
|
assert ts.seconds == -3 and ts.nanoseconds == 700000000
|
||||||
|
assert ts.to_bytes() == b"\x29\xb9\x27\x00\xff\xff\xff\xff\xff\xff\xff\xfd"
|
||||||
|
packed = msgpack.packb(ts)
|
||||||
|
assert packed == b"\xc7\x0c\xff" + ts.to_bytes()
|
||||||
|
unpacked = msgpack.unpackb(packed)
|
||||||
|
assert ts == unpacked
|
||||||
|
|
||||||
|
|
||||||
|
def test_unpack_timestamp():
|
||||||
|
# timestamp 32
|
||||||
|
assert msgpack.unpackb(b"\xd6\xff\x00\x00\x00\x00") == Timestamp(0)
|
||||||
|
|
||||||
|
# timestamp 64
|
||||||
|
assert msgpack.unpackb(b"\xd7\xff" + b"\x00" * 8) == Timestamp(0)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xd7\xff" + b"\xff" * 8)
|
||||||
|
|
||||||
|
# timestamp 96
|
||||||
|
assert msgpack.unpackb(b"\xc7\x0c\xff" + b"\x00" * 12) == Timestamp(0)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xc7\x0c\xff" + b"\xff" * 12) == Timestamp(0)
|
||||||
|
|
||||||
|
# Undefined
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xd4\xff\x00") # fixext 1
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xd5\xff\x00\x00") # fixext 2
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xc7\x00\xff") # ext8 (len=0)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xc7\x03\xff\0\0\0") # ext8 (len=3)
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
msgpack.unpackb(b"\xc7\x05\xff\0\0\0\0\0") # ext8 (len=5)
|
||||||
|
|
||||||
|
|
||||||
|
def test_timestamp_from():
|
||||||
|
t = Timestamp(42, 14000)
|
||||||
|
assert Timestamp.from_unix(42.000014) == t
|
||||||
|
assert Timestamp.from_unix_nano(42000014000) == t
|
||||||
|
|
||||||
|
|
||||||
|
def test_timestamp_to():
|
||||||
|
t = Timestamp(42, 14000)
|
||||||
|
assert t.to_unix() == 42.000014
|
||||||
|
assert t.to_unix_nano() == 42000014000
|
||||||
|
|
||||||
|
|
||||||
|
def test_timestamp_datetime():
|
||||||
|
t = Timestamp(42, 14)
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
assert t.to_datetime() == datetime.datetime(1970, 1, 1, 0, 0, 42, 0, tzinfo=utc)
|
||||||
|
|
||||||
|
ts = datetime.datetime(2024, 4, 16, 8, 43, 9, 420317, tzinfo=utc)
|
||||||
|
ts2 = datetime.datetime(2024, 4, 16, 8, 43, 9, 420318, tzinfo=utc)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
Timestamp.from_datetime(ts2).nanoseconds - Timestamp.from_datetime(ts).nanoseconds == 1000
|
||||||
|
)
|
||||||
|
|
||||||
|
ts3 = datetime.datetime(2024, 4, 16, 8, 43, 9, 4256)
|
||||||
|
ts4 = datetime.datetime(2024, 4, 16, 8, 43, 9, 4257)
|
||||||
|
assert (
|
||||||
|
Timestamp.from_datetime(ts4).nanoseconds - Timestamp.from_datetime(ts3).nanoseconds == 1000
|
||||||
|
)
|
||||||
|
|
||||||
|
assert Timestamp.from_datetime(ts).to_datetime() == ts
|
||||||
|
|
||||||
|
|
||||||
|
def test_unpack_datetime():
|
||||||
|
t = Timestamp(42, 14)
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
packed = msgpack.packb(t)
|
||||||
|
unpacked = msgpack.unpackb(packed, timestamp=3)
|
||||||
|
assert unpacked == datetime.datetime(1970, 1, 1, 0, 0, 42, 0, tzinfo=utc)
|
||||||
|
|
||||||
|
|
||||||
|
def test_pack_unpack_before_epoch():
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
t_in = datetime.datetime(1960, 1, 1, tzinfo=utc)
|
||||||
|
packed = msgpack.packb(t_in, datetime=True)
|
||||||
|
unpacked = msgpack.unpackb(packed, timestamp=3)
|
||||||
|
assert unpacked == t_in
|
||||||
|
|
||||||
|
|
||||||
|
def test_pack_datetime():
|
||||||
|
t = Timestamp(42, 14000)
|
||||||
|
dt = t.to_datetime()
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
assert dt == datetime.datetime(1970, 1, 1, 0, 0, 42, 14, tzinfo=utc)
|
||||||
|
|
||||||
|
packed = msgpack.packb(dt, datetime=True)
|
||||||
|
packed2 = msgpack.packb(t)
|
||||||
|
assert packed == packed2
|
||||||
|
|
||||||
|
unpacked = msgpack.unpackb(packed)
|
||||||
|
print(packed, unpacked)
|
||||||
|
assert unpacked == t
|
||||||
|
|
||||||
|
unpacked = msgpack.unpackb(packed, timestamp=3)
|
||||||
|
assert unpacked == dt
|
||||||
|
|
||||||
|
x = []
|
||||||
|
packed = msgpack.packb(dt, datetime=False, default=x.append)
|
||||||
|
assert x
|
||||||
|
assert x[0] == dt
|
||||||
|
assert msgpack.unpackb(packed) is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_issue451():
|
||||||
|
# https://github.com/msgpack/msgpack-python/issues/451
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
dt = datetime.datetime(2100, 1, 1, 1, 1, tzinfo=utc)
|
||||||
|
packed = msgpack.packb(dt, datetime=True)
|
||||||
|
assert packed == b"\xd6\xff\xf4\x86eL"
|
||||||
|
|
||||||
|
unpacked = msgpack.unpackb(packed, timestamp=3)
|
||||||
|
assert dt == unpacked
|
||||||
|
|
||||||
|
|
||||||
|
def test_pack_datetime_without_tzinfo():
|
||||||
|
dt = datetime.datetime(1970, 1, 1, 0, 0, 42, 14)
|
||||||
|
with pytest.raises(ValueError, match="where tzinfo=None"):
|
||||||
|
packed = msgpack.packb(dt, datetime=True)
|
||||||
|
|
||||||
|
dt = datetime.datetime(1970, 1, 1, 0, 0, 42, 14)
|
||||||
|
packed = msgpack.packb(dt, datetime=True, default=lambda x: None)
|
||||||
|
assert packed == msgpack.packb(None)
|
||||||
|
|
||||||
|
utc = datetime.timezone.utc
|
||||||
|
dt = datetime.datetime(1970, 1, 1, 0, 0, 42, 14, tzinfo=utc)
|
||||||
|
packed = msgpack.packb(dt, datetime=True)
|
||||||
|
unpacked = msgpack.unpackb(packed, timestamp=3)
|
||||||
|
assert unpacked == dt
|
||||||
|
|
@ -1,11 +1,13 @@
|
||||||
from io import BytesIO
|
|
||||||
import sys
|
import sys
|
||||||
from msgpack import Unpacker, packb, OutOfData, ExtType
|
from io import BytesIO
|
||||||
from pytest import raises, mark
|
|
||||||
|
from pytest import mark, raises
|
||||||
|
|
||||||
|
from msgpack import ExtType, OutOfData, Unpacker, packb
|
||||||
|
|
||||||
|
|
||||||
def test_unpack_array_header_from_file():
|
def test_unpack_array_header_from_file():
|
||||||
f = BytesIO(packb([1,2,3,4]))
|
f = BytesIO(packb([1, 2, 3, 4]))
|
||||||
unpacker = Unpacker(f)
|
unpacker = Unpacker(f)
|
||||||
assert unpacker.read_array_header() == 4
|
assert unpacker.read_array_header() == 4
|
||||||
assert unpacker.unpack() == 1
|
assert unpacker.unpack() == 1
|
||||||
|
|
@ -16,8 +18,10 @@ def test_unpack_array_header_from_file():
|
||||||
unpacker.unpack()
|
unpacker.unpack()
|
||||||
|
|
||||||
|
|
||||||
@mark.skipif("not hasattr(sys, 'getrefcount') == True",
|
@mark.skipif(
|
||||||
reason='sys.getrefcount() is needed to pass this test')
|
"not hasattr(sys, 'getrefcount') == True",
|
||||||
|
reason="sys.getrefcount() is needed to pass this test",
|
||||||
|
)
|
||||||
def test_unpacker_hook_refcnt():
|
def test_unpacker_hook_refcnt():
|
||||||
result = []
|
result = []
|
||||||
|
|
||||||
|
|
@ -43,12 +47,9 @@ def test_unpacker_hook_refcnt():
|
||||||
|
|
||||||
|
|
||||||
def test_unpacker_ext_hook():
|
def test_unpacker_ext_hook():
|
||||||
|
|
||||||
class MyUnpacker(Unpacker):
|
class MyUnpacker(Unpacker):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super(MyUnpacker, self).__init__(
|
super().__init__(ext_hook=self._hook, raw=False)
|
||||||
ext_hook=self._hook, raw=False)
|
|
||||||
|
|
||||||
def _hook(self, code, data):
|
def _hook(self, code, data):
|
||||||
if code == 1:
|
if code == 1:
|
||||||
|
|
@ -57,15 +58,32 @@ def test_unpacker_ext_hook():
|
||||||
return ExtType(code, data)
|
return ExtType(code, data)
|
||||||
|
|
||||||
unpacker = MyUnpacker()
|
unpacker = MyUnpacker()
|
||||||
unpacker.feed(packb({'a': 1}))
|
unpacker.feed(packb({"a": 1}))
|
||||||
assert unpacker.unpack() == {'a': 1}
|
assert unpacker.unpack() == {"a": 1}
|
||||||
unpacker.feed(packb({'a': ExtType(1, b'123')}))
|
unpacker.feed(packb({"a": ExtType(1, b"123")}))
|
||||||
assert unpacker.unpack() == {'a': 123}
|
assert unpacker.unpack() == {"a": 123}
|
||||||
unpacker.feed(packb({'a': ExtType(2, b'321')}))
|
unpacker.feed(packb({"a": ExtType(2, b"321")}))
|
||||||
assert unpacker.unpack() == {'a': ExtType(2, b'321')}
|
assert unpacker.unpack() == {"a": ExtType(2, b"321")}
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
def test_unpacker_tell():
|
||||||
test_unpack_array_header_from_file()
|
objects = 1, 2, "abc", "def", "ghi"
|
||||||
test_unpacker_hook_refcnt()
|
packed = b"\x01\x02\xa3abc\xa3def\xa3ghi"
|
||||||
test_unpacker_ext_hook()
|
positions = 1, 2, 6, 10, 14
|
||||||
|
unpacker = Unpacker(BytesIO(packed))
|
||||||
|
for obj, unp, pos in zip(objects, unpacker, positions):
|
||||||
|
assert obj == unp
|
||||||
|
assert pos == unpacker.tell()
|
||||||
|
|
||||||
|
|
||||||
|
def test_unpacker_tell_read_bytes():
|
||||||
|
objects = 1, "abc", "ghi"
|
||||||
|
packed = b"\x01\x02\xa3abc\xa3def\xa3ghi"
|
||||||
|
raw_data = b"\x02", b"\xa3def", b""
|
||||||
|
lenghts = 1, 4, 999
|
||||||
|
positions = 1, 6, 14
|
||||||
|
unpacker = Unpacker(BytesIO(packed))
|
||||||
|
for obj, unp, pos, n, raw in zip(objects, unpacker, positions, lenghts, raw_data):
|
||||||
|
assert obj == unp
|
||||||
|
assert pos == unpacker.tell()
|
||||||
|
assert unpacker.read_bytes(n) == raw
|
||||||
|
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
"""Tests for cases where the user seeks to obtain packed msgpack objects"""
|
|
||||||
|
|
||||||
import io
|
|
||||||
from msgpack import Unpacker, packb
|
|
||||||
|
|
||||||
|
|
||||||
def test_write_bytes():
|
|
||||||
unpacker = Unpacker()
|
|
||||||
unpacker.feed(b'abc')
|
|
||||||
f = io.BytesIO()
|
|
||||||
assert unpacker.unpack(f.write) == ord('a')
|
|
||||||
assert f.getvalue() == b'a'
|
|
||||||
f = io.BytesIO()
|
|
||||||
assert unpacker.skip(f.write) is None
|
|
||||||
assert f.getvalue() == b'b'
|
|
||||||
f = io.BytesIO()
|
|
||||||
assert unpacker.skip() is None
|
|
||||||
assert f.getvalue() == b''
|
|
||||||
|
|
||||||
|
|
||||||
def test_write_bytes_multi_buffer():
|
|
||||||
long_val = (5) * 100
|
|
||||||
expected = packb(long_val)
|
|
||||||
unpacker = Unpacker(io.BytesIO(expected), read_size=3, max_buffer_size=3)
|
|
||||||
|
|
||||||
f = io.BytesIO()
|
|
||||||
unpacked = unpacker.unpack(f.write)
|
|
||||||
assert unpacked == long_val
|
|
||||||
assert f.getvalue() == expected
|
|
||||||
38
tox.ini
38
tox.ini
|
|
@ -1,38 +0,0 @@
|
||||||
[tox]
|
|
||||||
envlist = {py27,py35,py36}-{c,pure},{pypy,pypy3}-pure,py27-x86,py34-x86
|
|
||||||
|
|
||||||
[variants:pure]
|
|
||||||
setenv=
|
|
||||||
MSGPACK_PUREPYTHON=x
|
|
||||||
|
|
||||||
[testenv]
|
|
||||||
deps=
|
|
||||||
pytest
|
|
||||||
|
|
||||||
changedir=test
|
|
||||||
commands=
|
|
||||||
c,x86: python -c 'from msgpack import _packer, _unpacker'
|
|
||||||
c,x86: py.test
|
|
||||||
pure: py.test
|
|
||||||
|
|
||||||
[testenv:py27-x86]
|
|
||||||
basepython=python2.7-x86
|
|
||||||
deps=
|
|
||||||
pytest
|
|
||||||
|
|
||||||
changedir=test
|
|
||||||
commands=
|
|
||||||
python -c 'import sys; print(hex(sys.maxsize))'
|
|
||||||
python -c 'from msgpack import _packer, _unpacker'
|
|
||||||
py.test
|
|
||||||
|
|
||||||
[testenv:py34-x86]
|
|
||||||
basepython=python3.4-x86
|
|
||||||
deps=
|
|
||||||
pytest
|
|
||||||
|
|
||||||
changedir=test
|
|
||||||
commands=
|
|
||||||
python -c 'import sys; print(hex(sys.maxsize))'
|
|
||||||
python -c 'from msgpack import _packer, _unpacker'
|
|
||||||
py.test
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue