Merge branch 'main' into windows-socket-sendfile

commit 8bd91bb15d
Author: AN Long
Date: 2024-04-24 20:51:45 +08:00 (committed by GitHub)
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
1889 changed files with 127947 additions and 60594 deletions


@@ -1,29 +0,0 @@
# gh-91960: Job disabled since Python is out of free credit (September 2023):
# https://discuss.python.org/t/freebsd-gets-a-new-cirrus-ci-github-action-job-and-a-new-buildbot/33122/26
freebsd_task:
freebsd_instance:
matrix:
- image: freebsd-13-2-release-amd64
# Turn off TCP and UDP blackhole. It is not enabled by default in FreeBSD,
# but it is in the FreeBSD GCE images as used by Cirrus-CI. It causes even
# local connections to fail with ETIMEDOUT instead of ECONNREFUSED.
# For more information see https://reviews.freebsd.org/D41751 and
# https://github.com/cirruslabs/cirrus-ci-docs/issues/483.
sysctl_script:
- sysctl net.inet.tcp.blackhole=0
- sysctl net.inet.udp.blackhole=0
configure_script:
- mkdir build
- cd build
- ../configure --with-pydebug
build_script:
- cd build
- make -j$(sysctl -n hw.ncpu)
pythoninfo_script:
- cd build
- make pythoninfo
test_script:
- cd build
# dtrace fails to build on FreeBSD - see gh-73263
- make buildbottest TESTOPTS="-j0 -x test_dtrace --timeout=600"


@@ -1,12 +1,12 @@
-FROM docker.io/library/fedora:37
+FROM docker.io/library/fedora:40
 ENV CC=clang
-ENV WASI_SDK_VERSION=20
+ENV WASI_SDK_VERSION=21
 ENV WASI_SDK_PATH=/opt/wasi-sdk
 ENV WASMTIME_HOME=/opt/wasmtime
-ENV WASMTIME_VERSION=14.0.4
+ENV WASMTIME_VERSION=18.0.3
 ENV WASMTIME_CPU_ARCH=x86_64
 RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \


@@ -1,6 +1,6 @@
 root = true
-[*.{py,c,cpp,h,rst,md,yml}]
+[*.{py,c,cpp,h,js,rst,md,yml}]
 trim_trailing_whitespace = true
 insert_final_newline = true
 indent_style = space
@@ -11,5 +11,5 @@ indent_size = 4
 [*.rst]
 indent_size = 3
-[*.yml]
+[*.{js,yml}]
 indent_size = 2

.gitattributes (vendored, 4 changed lines)

@@ -77,11 +77,14 @@ Include/internal/pycore_opcode.h generated
 Include/internal/pycore_opcode_metadata.h generated
 Include/internal/pycore_*_generated.h generated
 Include/internal/pycore_uop_ids.h generated
+Include/internal/pycore_uop_metadata.h generated
 Include/opcode.h generated
 Include/opcode_ids.h generated
 Include/token.h generated
 Lib/_opcode_metadata.py generated
 Lib/keyword.py generated
+Lib/test/certdata/*.pem generated
+Lib/test/certdata/*.0 generated
 Lib/test/levenshtein_examples.json generated
 Lib/test/test_stable_abi_ctypes.py generated
 Lib/token.py generated
@@ -94,6 +97,7 @@ Programs/test_frozenmain.h generated
 Python/Python-ast.c generated
 Python/executor_cases.c.h generated
 Python/generated_cases.c.h generated
+Python/optimizer_cases.c.h generated
 Python/opcode_targets.h generated
 Python/stdlib_module_names.h generated
 Tools/peg_generator/pegen/grammar_parser.py generated

.github/CODEOWNERS (vendored, 74 changed lines)

@@ -21,6 +21,7 @@ configure* @erlend-aasland @corona10
 **/*context* @1st1
 **/*genobject* @markshannon
 **/*hamt* @1st1
+**/*jit* @brandtbucher
 Objects/set* @rhettinger
 Objects/dict* @methane @markshannon
 Objects/typevarobject.c @JelleZijlstra
@@ -36,12 +37,40 @@ Python/flowgraph.c @markshannon @iritkatriel
 Python/ast_opt.c @isidentical
 Python/bytecodes.c @markshannon @gvanrossum
 Python/optimizer*.c @markshannon @gvanrossum
+Python/optimizer_analysis.c @Fidget-Spinner
+Python/optimizer_bytecodes.c @Fidget-Spinner
 Lib/test/test_patma.py @brandtbucher
+Lib/test/test_peepholer.py @brandtbucher
 Lib/test/test_type_*.py @JelleZijlstra
 Lib/test/test_capi/test_misc.py @markshannon @gvanrossum
 Tools/c-analyzer/ @ericsnowcurrently
+# dbm
+**/*dbm* @corona10 @erlend-aasland @serhiy-storchaka
+# runtime state/lifecycle
+**/*pylifecycle* @ericsnowcurrently
+**/*pystate* @ericsnowcurrently
+**/*preconfig* @ericsnowcurrently
+**/*initconfig* @ericsnowcurrently
+**/*pathconfig* @ericsnowcurrently
+**/*sysmodule* @ericsnowcurrently
+**/*bltinmodule* @ericsnowcurrently
+**/*gil* @ericsnowcurrently
+Include/internal/pycore_runtime.h @ericsnowcurrently
+Include/internal/pycore_interp.h @ericsnowcurrently
+Include/internal/pycore_tstate.h @ericsnowcurrently
+Include/internal/pycore_*_state.h @ericsnowcurrently
+Include/internal/pycore_*_init.h @ericsnowcurrently
+Include/internal/pycore_atexit.h @ericsnowcurrently
+Include/internal/pycore_freelist.h @ericsnowcurrently
+Include/internal/pycore_global_objects.h @ericsnowcurrently
+Include/internal/pycore_obmalloc.h @ericsnowcurrently
+Include/internal/pycore_pymem.h @ericsnowcurrently
+Modules/main.c @ericsnowcurrently
+Programs/_bootstrap_python.c @ericsnowcurrently
+Programs/python.c @ericsnowcurrently
+Tools/build/generate_global_objects.py @ericsnowcurrently
 # Exceptions
 Lib/traceback.py @iritkatriel
 Lib/test/test_except*.py @iritkatriel
@@ -50,13 +79,13 @@ Objects/exceptions.c @iritkatriel
 Python/traceback.c @iritkatriel
 # Hashing
-**/*hashlib* @tiran
+**/*hashlib* @gpshead @tiran
-**/*pyhash* @tiran
+**/*pyhash* @gpshead @tiran
-**/*sha* @tiran
+**/sha* @gpshead @tiran
-**/*md5* @tiran
+Modules/md5* @gpshead @tiran
-**/*blake* @tiran
+**/*blake* @gpshead @tiran
-/Modules/_blake2/** @tiran
+Modules/_blake2/** @gpshead @tiran
-/Modules/_sha3/** @tiran
+Modules/_hacl/** @gpshead
 # logging
 **/*logging* @vsajip
@@ -76,8 +105,21 @@ Python/traceback.c @iritkatriel
 # Import (including importlib).
 **/*import* @brettcannon @ericsnowcurrently @ncoghlan @warsaw
 /Python/import.c @kumaraditya303
+Python/dynload_*.c @ericsnowcurrently
+**/*freeze* @ericsnowcurrently
+**/*frozen* @ericsnowcurrently
+**/*modsupport* @ericsnowcurrently
+**/*modulefinder* @ericsnowcurrently
+**/*moduleobject* @ericsnowcurrently
+**/*multiphase* @ericsnowcurrently
+**/*pkgutil* @ericsnowcurrently
+**/*pythonrun* @ericsnowcurrently
+**/*runpy* @ericsnowcurrently
+**/*singlephase* @ericsnowcurrently
+Lib/test/test_module/ @ericsnowcurrently
+Doc/c-api/module.rst @ericsnowcurrently
 **/*importlib/resources/* @jaraco @warsaw @FFY00
-**/importlib/metadata/* @jaraco @warsaw
+**/*importlib/metadata/* @jaraco @warsaw
 # Dates and times
 **/*datetime* @pganssle @abalkin
@@ -120,6 +162,9 @@ Lib/ast.py @isidentical
 /Lib/unittest/mock.py @cjw296
 /Lib/test/test_unittest/testmock/* @cjw296
+# multiprocessing
+**/*multiprocessing* @gpshead
 # SQLite 3
 **/*sqlite* @berkerpeksag @erlend-aasland
@@ -187,11 +232,13 @@ Doc/c-api/stable.rst @encukou
 **/*zipfile/_path/* @jaraco
 # Argument Clinic
-/Tools/clinic/** @erlend-aasland @AlexWaygood
+/Tools/clinic/** @erlend-aasland
-/Lib/test/test_clinic.py @erlend-aasland @AlexWaygood
+/Lib/test/test_clinic.py @erlend-aasland
 Doc/howto/clinic.rst @erlend-aasland
 # Subinterpreters
+**/*interpreteridobject.* @ericsnowcurrently
+**/*crossinterp* @ericsnowcurrently
 Lib/test/support/interpreters/ @ericsnowcurrently
 Modules/_xx*interp*module.c @ericsnowcurrently
 Lib/test/test_interpreters/ @ericsnowcurrently
@@ -200,5 +247,10 @@ Lib/test/test_interpreters/ @ericsnowcurrently
 /Tools/wasm/ @brettcannon
 # SBOM
+/Misc/externals.spdx.json @sethmlarson
 /Misc/sbom.spdx.json @sethmlarson
 /Tools/build/generate_sbom.py @sethmlarson
+# Config Parser
+Lib/configparser.py @jaraco
+Lib/test/test_configparser.py @jaraco


@@ -97,7 +97,7 @@ jobs:
 - name: Get a list of the changed documentation-related files
 if: github.event_name == 'pull_request'
 id: changed-docs-files
-uses: Ana06/get-changed-files@v2.2.0
+uses: Ana06/get-changed-files@v2.3.0
 with:
 filter: |
 Doc/**
@@ -131,17 +131,22 @@ jobs:
 - uses: actions/setup-python@v5
 with:
 python-version: '3.x'
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}-${{ env.pythonLocation }}
+# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
+key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}-${{ env.pythonLocation }}
 - name: Install Dependencies
 run: sudo ./.github/workflows/posix-deps-apt.sh
 - name: Add ccache to PATH
 run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 - name: Configure ccache action
 uses: hendrikmuhs/ccache-action@v1.2
+with:
+save: false
 - name: Check Autoconf and aclocal versions
 run: |
 grep "Generated by GNU Autoconf 2.71" configure
@@ -158,7 +163,7 @@ jobs:
 - name: Build CPython
 run: |
 make -j4 regen-all
-make regen-stdlib-module-names
+make regen-stdlib-module-names regen-sbom
 - name: Check for changes
 run: |
 git add -u
@@ -202,6 +207,8 @@ jobs:
 uses: ./.github/workflows/reusable-macos.yml
 with:
 config_hash: ${{ needs.check_source.outputs.config_hash }}
+# macos-14 is M1, macos-13 is Intel
+os-matrix: '["macos-14", "macos-13"]'
 build_macos_free_threading:
 name: 'macOS (free-threading)'
@@ -211,6 +218,8 @@ jobs:
 with:
 config_hash: ${{ needs.check_source.outputs.config_hash }}
 free-threading: true
+# macos-14 is M1
+os-matrix: '["macos-14"]'
 build_ubuntu:
 name: 'Ubuntu'
@@ -248,7 +257,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-openssl_ver: [1.1.1w, 3.0.11, 3.1.3]
+openssl_ver: [1.1.1w, 3.0.13, 3.1.5, 3.2.1]
 env:
 OPENSSL_VER: ${{ matrix.openssl_ver }}
 MULTISSL_DIR: ${{ github.workspace }}/multissl
@@ -256,11 +265,13 @@ jobs:
 LD_LIBRARY_PATH: ${{ github.workspace }}/multissl/openssl/${{ matrix.openssl_ver }}/lib
 steps:
 - uses: actions/checkout@v4
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
+key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
 - name: Register gcc problem matcher
 run: echo "::add-matcher::.github/problem-matchers/gcc.json"
 - name: Install Dependencies
@@ -272,7 +283,7 @@ jobs:
 echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
 - name: 'Restore OpenSSL build'
 id: cache-openssl
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ./multissl/openssl/${{ env.OPENSSL_VER }}
 key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
@@ -284,6 +295,8 @@ jobs:
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 - name: Configure ccache action
 uses: hendrikmuhs/ccache-action@v1.2
+with:
+save: false
 - name: Configure CPython
 run: ./configure --config-cache --with-pydebug --with-openssl=$OPENSSL_DIR
 - name: Build CPython
@@ -293,6 +306,14 @@ jobs:
 - name: SSL tests
 run: ./python Lib/test/ssltests.py
+build_wasi:
+name: 'WASI'
+needs: check_source
+if: needs.check_source.outputs.run_tests == 'true'
+uses: ./.github/workflows/reusable-wasi.yml
+with:
+config_hash: ${{ needs.check_source.outputs.config_hash }}
 test_hypothesis:
 name: "Hypothesis tests on Ubuntu"
 runs-on: ubuntu-20.04
@@ -300,7 +321,7 @@ jobs:
 needs: check_source
 if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true'
 env:
-OPENSSL_VER: 3.0.11
+OPENSSL_VER: 3.0.13
 PYTHONSTRICTEXTENSIONBUILD: 1
 steps:
 - uses: actions/checkout@v4
@@ -315,7 +336,7 @@ jobs:
 echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
 - name: 'Restore OpenSSL build'
 id: cache-openssl
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ./multissl/openssl/${{ env.OPENSSL_VER }}
 key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
@@ -327,6 +348,8 @@ jobs:
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 - name: Configure ccache action
 uses: hendrikmuhs/ccache-action@v1.2
+with:
+save: false
 - name: Setup directory envs for out-of-tree builds
 run: |
 echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
@@ -335,11 +358,13 @@ jobs:
 run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
 - name: Bind mount sources read-only
 run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
+key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
 - name: Configure CPython out-of-tree
 working-directory: ${{ env.CPYTHON_BUILDDIR }}
 run: |
@@ -369,7 +394,7 @@ jobs:
 ./python -m venv $VENV_LOC && $VENV_PYTHON -m pip install -r ${GITHUB_WORKSPACE}/Tools/requirements-hypothesis.txt
 - name: 'Restore Hypothesis database'
 id: cache-hypothesis-database
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ./hypothesis
 key: hypothesis-database-${{ github.head_ref || github.run_id }}
@@ -409,16 +434,18 @@ jobs:
 needs: check_source
 if: needs.check_source.outputs.run_tests == 'true'
 env:
-OPENSSL_VER: 3.0.11
+OPENSSL_VER: 3.0.13
 PYTHONSTRICTEXTENSIONBUILD: 1
 ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0
 steps:
 - uses: actions/checkout@v4
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
+key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
 - name: Register gcc problem matcher
 run: echo "::add-matcher::.github/problem-matchers/gcc.json"
 - name: Install Dependencies
@@ -434,7 +461,7 @@ jobs:
 echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
 - name: 'Restore OpenSSL build'
 id: cache-openssl
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ./multissl/openssl/${{ env.OPENSSL_VER }}
 key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
@@ -446,6 +473,9 @@ jobs:
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 - name: Configure ccache action
 uses: hendrikmuhs/ccache-action@v1.2
+with:
+save: ${{ github.event_name == 'push' }}
+max-size: "200M"
 - name: Configure CPython
 run: ./configure --config-cache --with-address-sanitizer --without-pymalloc
 - name: Build CPython
@@ -455,6 +485,26 @@ jobs:
 - name: Tests
 run: xvfb-run make test
+build_tsan:
+name: 'Thread sanitizer'
+needs: check_source
+if: needs.check_source.outputs.run_tests == 'true'
+uses: ./.github/workflows/reusable-tsan.yml
+with:
+config_hash: ${{ needs.check_source.outputs.config_hash }}
+options: ./configure --config-cache --with-thread-sanitizer --with-pydebug
+suppressions_path: Tools/tsan/supressions.txt
+build_tsan_free_threading:
+name: 'Thread sanitizer (free-threading)'
+needs: check_source
+if: needs.check_source.outputs.run_tests == 'true'
+uses: ./.github/workflows/reusable-tsan.yml
+with:
+config_hash: ${{ needs.check_source.outputs.config_hash }}
+options: ./configure --config-cache --disable-gil --with-thread-sanitizer --with-pydebug
+suppressions_path: Tools/tsan/suppressions_free_threading.txt
 # CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/
 cifuzz:
 name: CIFuzz
@@ -508,10 +558,13 @@ jobs:
 - build_ubuntu
 - build_ubuntu_free_threading
 - build_ubuntu_ssltests
+- build_wasi
 - build_windows
 - build_windows_free_threading
 - test_hypothesis
 - build_asan
+- build_tsan
+- build_tsan_free_threading
 - cifuzz
 runs-on: ubuntu-latest
@@ -541,9 +594,12 @@ jobs:
 build_ubuntu,
 build_ubuntu_free_threading,
 build_ubuntu_ssltests,
+build_wasi,
 build_windows,
 build_windows_free_threading,
 build_asan,
+build_tsan,
+build_tsan_free_threading,
 '
 || ''
 }}


@@ -32,6 +32,8 @@ jobs:
 strategy:
 matrix:
 type: [x86, x64, arm64]
+env:
+IncludeFreethreaded: true
 steps:
 - uses: actions/checkout@v4
 - name: Build CPython installer

.github/workflows/jit.yml (new file, 144 lines)

@@ -0,0 +1,144 @@
name: JIT
on:
pull_request:
paths:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
push:
paths:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
jit:
name: ${{ matrix.target }} (${{ matrix.debug && 'Debug' || 'Release' }})
runs-on: ${{ matrix.runner }}
timeout-minutes: 75
strategy:
fail-fast: false
matrix:
target:
- i686-pc-windows-msvc/msvc
- x86_64-pc-windows-msvc/msvc
- aarch64-pc-windows-msvc/msvc
- x86_64-apple-darwin/clang
- aarch64-apple-darwin/clang
- x86_64-unknown-linux-gnu/gcc
- x86_64-unknown-linux-gnu/clang
- aarch64-unknown-linux-gnu/gcc
- aarch64-unknown-linux-gnu/clang
debug:
- true
- false
llvm:
- 16
include:
- target: i686-pc-windows-msvc/msvc
architecture: Win32
runner: windows-latest
compiler: msvc
- target: x86_64-pc-windows-msvc/msvc
architecture: x64
runner: windows-latest
compiler: msvc
- target: aarch64-pc-windows-msvc/msvc
architecture: ARM64
runner: windows-latest
compiler: msvc
- target: x86_64-apple-darwin/clang
architecture: x86_64
runner: macos-13
compiler: clang
- target: aarch64-apple-darwin/clang
architecture: aarch64
runner: macos-14
compiler: clang
- target: x86_64-unknown-linux-gnu/gcc
architecture: x86_64
runner: ubuntu-latest
compiler: gcc
- target: x86_64-unknown-linux-gnu/clang
architecture: x86_64
runner: ubuntu-latest
compiler: clang
- target: aarch64-unknown-linux-gnu/gcc
architecture: aarch64
runner: ubuntu-latest
compiler: gcc
# These fail because of emulation, not because of the JIT:
exclude: test_unix_events test_init test_process_pool test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os test_perf_profiler test_posix test_signal test_socket test_subprocess test_threading test_venv test_external_inspection
- target: aarch64-unknown-linux-gnu/clang
architecture: aarch64
runner: ubuntu-latest
compiler: clang
# These fail because of emulation, not because of the JIT:
exclude: test_unix_events test_init test_process_pool test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os test_perf_profiler test_posix test_signal test_socket test_subprocess test_threading test_venv test_external_inspection
env:
CC: ${{ matrix.compiler }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Native Windows
if: runner.os == 'Windows' && matrix.architecture != 'ARM64'
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '--pgo' }} -p ${{ matrix.architecture }}
./PCbuild/rt.bat ${{ matrix.debug && '-d' }} -p ${{ matrix.architecture }} -q --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
# No PGO or tests (yet):
- name: Emulated Windows
if: runner.os == 'Windows' && matrix.architecture == 'ARM64'
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }}
- name: Native macOS
if: runner.os == 'macOS'
run: |
brew install llvm@${{ matrix.llvm }}
SDKROOT="$(xcrun --show-sdk-path)" \
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }}
make all --jobs 4
./python.exe -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
- name: Native Linux
if: runner.os == 'Linux' && matrix.architecture == 'x86_64'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }}
make all --jobs 4
./python -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
- name: Emulated Linux
if: runner.os == 'Linux' && matrix.architecture != 'x86_64'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
./configure --prefix="$(pwd)/../build"
make install --jobs 4
make clean --jobs 4
export HOST=${{ matrix.architecture }}-linux-gnu
sudo apt install --yes "gcc-$HOST" qemu-user
${{ !matrix.debug && matrix.compiler == 'clang' && './configure --enable-optimizations' || '' }}
${{ !matrix.debug && matrix.compiler == 'clang' && 'make profile-run-stamp --jobs 4' || '' }}
export QEMU_LD_PREFIX="/usr/$HOST"
CC="${{ matrix.compiler == 'clang' && 'clang --target=$HOST' || '$HOST-gcc' }}" \
CPP="$CC --preprocess" \
HOSTRUNNER=qemu-${{ matrix.architecture }} \
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes
make all --jobs 4
./python -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3


@@ -23,4 +23,4 @@ jobs:
 - uses: actions/setup-python@v5
 with:
 python-version: "3.x"
-- uses: pre-commit/action@v3.0.0
+- uses: pre-commit/action@v3.0.1


@@ -12,6 +12,7 @@ on:
 - "Tools/build/generate_sbom.py"
 - "Tools/cases_generator/**"
 - "Tools/clinic/**"
+- "Tools/jit/**"
 - "Tools/peg_generator/**"
 - "Tools/requirements-dev.txt"
 - "Tools/wasm/**"
@@ -38,6 +39,7 @@ jobs:
 "Tools/build/",
 "Tools/cases_generator",
 "Tools/clinic",
+"Tools/jit",
 "Tools/peg_generator",
 "Tools/wasm",
 ]


@@ -23,7 +23,7 @@ jobs:
 - { project: 32, label: sprint }
 steps:
-- uses: actions/add-to-project@v0.1.0
+- uses: actions/add-to-project@v1.0.0
 with:
 project-url: https://github.com/orgs/python/projects/${{ matrix.project }}
 github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}


@@ -11,6 +11,7 @@ permissions:
 jobs:
 label:
 name: DO-NOT-MERGE / unresolved review
+if: github.repository_owner == 'python'
 runs-on: ubuntu-latest
 timeout-minutes: 10


@@ -89,7 +89,7 @@ jobs:
 timeout-minutes: 60
 steps:
 - uses: actions/checkout@v4
-- uses: actions/cache@v3
+- uses: actions/cache@v4
 with:
 path: ~/.cache/pip
 key: ubuntu-doc-${{ hashFiles('Doc/requirements.txt') }}


@@ -8,24 +8,34 @@ on:
 required: false
 type: boolean
 default: false
+os-matrix:
+required: false
+type: string
 jobs:
 build_macos:
 name: 'build and test'
-runs-on: macos-latest
 timeout-minutes: 60
 env:
 HOMEBREW_NO_ANALYTICS: 1
 HOMEBREW_NO_AUTO_UPDATE: 1
 HOMEBREW_NO_INSTALL_CLEANUP: 1
+HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
 PYTHONSTRICTEXTENSIONBUILD: 1
+strategy:
+fail-fast: false
+matrix:
+os: ${{fromJson(inputs.os-matrix)}}
+runs-on: ${{ matrix.os }}
 steps:
 - uses: actions/checkout@v4
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ inputs.config_hash }}
+key: ${{ github.job }}-${{ matrix.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
 - name: Install Homebrew dependencies
 run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk
 - name: Configure CPython

.github/workflows/reusable-tsan.yml (new file, 55 lines)

@@ -0,0 +1,55 @@
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
options:
required: true
type: string
suppressions_path:
description: 'A repo relative path to the suppressions file'
required: true
type: string
jobs:
build_tsan_reusable:
name: 'Thread sanitizer'
runs-on: ubuntu-22.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
- name: Install Dependencies
run: |
sudo ./.github/workflows/posix-deps-apt.sh
sudo apt install -y clang
# Reduce ASLR to avoid TSAN crashing
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN Option Setup
run: |
echo "TSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }}" >> $GITHUB_ENV
echo "CC=clang" >> $GITHUB_ENV
echo "CXX=clang++" >> $GITHUB_ENV
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: ${{ inputs.options }}
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
run: ./python -m test --tsan -j4


@@ -14,7 +14,7 @@ jobs:
 timeout-minutes: 60
 runs-on: ubuntu-20.04
 env:
-OPENSSL_VER: 3.0.11
+OPENSSL_VER: 3.0.13
 PYTHONSTRICTEXTENSIONBUILD: 1
 steps:
 - uses: actions/checkout@v4
@@ -29,7 +29,7 @@ jobs:
 echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
 - name: 'Restore OpenSSL build'
 id: cache-openssl
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ./multissl/openssl/${{ env.OPENSSL_VER }}
 key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
@@ -41,6 +41,9 @@ jobs:
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 - name: Configure ccache action
 uses: hendrikmuhs/ccache-action@v1.2
+with:
+save: ${{ github.event_name == 'push' }}
+max-size: "200M"
 - name: Setup directory envs for out-of-tree builds
 run: |
 echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
@@ -49,11 +52,13 @@ jobs:
 run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
 - name: Bind mount sources read-only
 run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
+- name: Runner image version
+run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
 - name: Restore config.cache
-uses: actions/cache@v3
+uses: actions/cache@v4
 with:
 path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
-key: ${{ github.job }}-${{ runner.os }}-${{ inputs.config_hash }}
+key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
 - name: Configure CPython out-of-tree
 working-directory: ${{ env.CPYTHON_BUILDDIR }}
 run: ${{ inputs.options }}

.github/workflows/reusable-wasi.yml (new file, 73 lines)

@@ -0,0 +1,73 @@
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
jobs:
build_wasi_reusable:
name: 'build and test'
timeout-minutes: 60
runs-on: ubuntu-20.04
env:
WASMTIME_VERSION: 18.0.3
WASI_SDK_VERSION: 21
WASI_SDK_PATH: /opt/wasi-sdk
CROSS_BUILD_PYTHON: cross-build/build
CROSS_BUILD_WASI: cross-build/wasm32-wasi
steps:
- uses: actions/checkout@v4
# No problem resolver registered as one doesn't currently exist for Clang.
- name: "Install wasmtime"
uses: jcbhmr/setup-wasmtime@v2
with:
wasmtime-version: ${{ env.WASMTIME_VERSION }}
- name: "Restore WASI SDK"
id: cache-wasi-sdk
uses: actions/cache@v4
with:
path: ${{ env.WASI_SDK_PATH }}
key: ${{ runner.os }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}
- name: "Install WASI SDK"
if: steps.cache-wasi-sdk.outputs.cache-hit != 'true'
run: |
mkdir ${{ env.WASI_SDK_PATH }} && \
curl -s -S --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${{ env.WASI_SDK_VERSION }}/wasi-sdk-${{ env.WASI_SDK_VERSION }}.0-linux.tar.gz | \
tar --strip-components 1 --directory ${{ env.WASI_SDK_PATH }} --extract --gunzip
- name: "Configure ccache action"
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: "Add ccache to PATH"
run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: "Install Python"
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: "Restore Python build config.cache"
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }}
- name: "Configure build Python"
run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug
- name: "Make build Python"
run: python3 Tools/wasm/wasi.py make-build-python
- name: "Restore host config.cache"
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_WASI }}/config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }}
- name: "Configure host"
# `--with-pydebug` inferred from configure-build-python
run: python3 Tools/wasm/wasi.py configure-host -- --config-cache
- name: "Make host"
run: python3 Tools/wasm/wasi.py make-host
- name: "Display build info"
run: make --directory ${{ env.CROSS_BUILD_WASI }} pythoninfo
- name: "Test"
run: make --directory ${{ env.CROSS_BUILD_WASI }} test


@@ -20,7 +20,7 @@ jobs:
 - name: Display build info
 run: .\python.bat -m test.pythoninfo
 - name: Tests
-run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci
+run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }}
 build_win_amd64:
 name: 'build and test (x64)'
@@ -37,7 +37,7 @@ jobs:
 - name: Display build info
 run: .\python.bat -m test.pythoninfo
 - name: Tests
-run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci
+run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }}
 build_win_arm64:
 name: 'build (arm64)'


@@ -2,7 +2,7 @@ name: Mark stale pull requests
 on:
 schedule:
-- cron: "0 0 * * *"
+- cron: "0 */6 * * *"
 permissions:
 pull-requests: write

.gitignore (vendored, 15 changed lines)

@@ -69,6 +69,17 @@ Lib/test/data/*
 /_bootstrap_python
 /Makefile
 /Makefile.pre
+/iOSTestbed.*
+iOS/Frameworks/
+iOS/Resources/Info.plist
+iOS/testbed/build
+iOS/testbed/Python.xcframework/ios-*/bin
+iOS/testbed/Python.xcframework/ios-*/include
+iOS/testbed/Python.xcframework/ios-*/lib
+iOS/testbed/Python.xcframework/ios-*/Python.framework
+iOS/testbed/iOSTestbed.xcodeproj/project.xcworkspace
+iOS/testbed/iOSTestbed.xcodeproj/xcuserdata
+iOS/testbed/iOSTestbed.xcodeproj/xcshareddata
 Mac/Makefile
 Mac/PythonLauncher/Info.plist
 Mac/PythonLauncher/Makefile
@@ -126,11 +137,11 @@ Tools/unicode/data/
 # hendrikmuhs/ccache-action@v1
 /.ccache
 /cross-build/
+/jit_stencils.h
 /platform
 /profile-clean-stamp
 /profile-run-stamp
 /profile-bolt-stamp
-/Python/deepfreeze/*.c
 /pybuilddir.txt
 /pyconfig.h
 /python-config
@@ -158,5 +169,5 @@ Python/frozen_modules/MANIFEST
 /python
 !/Python/
-# main branch only: ABI files are not checked/maintained
+# main branch only: ABI files are not checked/maintained.
 Doc/data/python*.abi


@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.1.7
+rev: v0.3.4
 hooks:
 - id: ruff
 name: Run Ruff on Lib/test/
@@ -14,6 +14,8 @@ repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
 rev: v4.5.0
 hooks:
+- id: check-case-conflict
+- id: check-merge-conflict
 - id: check-toml
 exclude: ^Lib/test/test_tomllib/
 - id: check-yaml
@@ -28,7 +30,7 @@ repos:
 hooks:
 - id: sphinx-lint
 args: [--enable=default-role]
-files: ^Doc/|^Misc/NEWS.d/next/
+files: ^Doc/|^Misc/NEWS.d/
 - repo: meta
 hooks:

Android/README.md (new file, 64 lines)

@@ -0,0 +1,64 @@
# Python for Android
These instructions are only needed if you're planning to compile Python for
Android yourself. Most users should *not* need to do this. If you're looking to
use Python on Android, one of the following tools will provide a much more
approachable user experience:
* [Briefcase](https://briefcase.readthedocs.io), from the BeeWare project
* [Buildozer](https://buildozer.readthedocs.io), from the Kivy project
* [Chaquopy](https://chaquo.com/chaquopy/)
## Prerequisites
Export the `ANDROID_HOME` environment variable to point at your Android SDK. If
you don't already have the SDK, here's how to install it:
* Download the "Command line tools" from <https://developer.android.com/studio>.
* Create a directory `android-sdk/cmdline-tools`, and unzip the command line
tools package into it.
* Rename `android-sdk/cmdline-tools/cmdline-tools` to
`android-sdk/cmdline-tools/latest`.
* `export ANDROID_HOME=/path/to/android-sdk`
## Building
Building for Android requires doing a cross-build where you have a "build"
Python to help produce an Android build of CPython. This procedure has been
tested on Linux and macOS.
The easiest way to do a build is to use the `android.py` script. You can either
have it perform the entire build process from start to finish in one step, or
you can do it in discrete steps that mirror running `configure` and `make` for
each of the two builds of Python you end up producing.
The discrete steps for building via `android.py` are:
```sh
./android.py configure-build
./android.py make-build
./android.py configure-host HOST
./android.py make-host HOST
```
To see the possible values of HOST, run `./android.py configure-host --help`.
Or to do it all in a single command, run:
```sh
./android.py build HOST
```
In the end you should have a build Python in `cross-build/build`, and an Android
build in `cross-build/HOST`.
You can use `--` as a separator for any of the `configure`-related commands
including `build` itself to pass arguments to the underlying `configure`
call. For example, if you want a pydebug build that also caches the results from
`configure`, you can do:
```sh
./android.py build HOST -- -C --with-pydebug
```

Android/android-env.sh (new file, 87 lines)

@@ -0,0 +1,87 @@
# This script must be sourced with the following variables already set:
: ${ANDROID_HOME:?} # Path to Android SDK
: ${HOST:?} # GNU target triplet
# You may also override the following:
: ${api_level:=21} # Minimum Android API level the build will run on
: ${PREFIX:-} # Path in which to find required libraries
# Print all messages on stderr so they're visible when running within build-wheel.
log() {
echo "$1" >&2
}
fail() {
log "$1"
exit 1
}
# When moving to a new version of the NDK, carefully review the following:
#
# * https://developer.android.com/ndk/downloads/revision_history
#
# * https://android.googlesource.com/platform/ndk/+/ndk-rXX-release/docs/BuildSystemMaintainers.md
# where XX is the NDK version. Do a diff against the version you're upgrading from, e.g.:
# https://android.googlesource.com/platform/ndk/+/ndk-r25-release..ndk-r26-release/docs/BuildSystemMaintainers.md
ndk_version=26.2.11394342
ndk=$ANDROID_HOME/ndk/$ndk_version
if ! [ -e $ndk ]; then
log "Installing NDK: this may take several minutes"
yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version"
fi
if [ $HOST = "arm-linux-androideabi" ]; then
clang_triplet=armv7a-linux-androideabi
else
clang_triplet=$HOST
fi
# These variables are based on BuildSystemMaintainers.md above, and
# $ndk/build/cmake/android.toolchain.cmake.
toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*)
export AR="$toolchain/bin/llvm-ar"
export AS="$toolchain/bin/llvm-as"
export CC="$toolchain/bin/${clang_triplet}${api_level}-clang"
export CXX="${CC}++"
export LD="$toolchain/bin/ld"
export NM="$toolchain/bin/llvm-nm"
export RANLIB="$toolchain/bin/llvm-ranlib"
export READELF="$toolchain/bin/llvm-readelf"
export STRIP="$toolchain/bin/llvm-strip"
# The quotes make sure the wildcard in the `toolchain` assignment has been expanded.
for path in "$AR" "$AS" "$CC" "$CXX" "$LD" "$NM" "$RANLIB" "$READELF" "$STRIP"; do
if ! [ -e "$path" ]; then
fail "$path does not exist"
fi
done
export CFLAGS=""
export LDFLAGS="-Wl,--build-id=sha1 -Wl,--no-rosegment"
# Many packages get away with omitting -lm on Linux, but Android is stricter.
LDFLAGS="$LDFLAGS -lm"
# -mstackrealign is included where necessary in the clang launcher scripts which are
# pointed to by $CC, so we don't need to include it here.
if [ $HOST = "arm-linux-androideabi" ]; then
CFLAGS="$CFLAGS -march=armv7-a -mthumb"
fi
if [ -n "${PREFIX:-}" ]; then
abs_prefix=$(realpath $PREFIX)
CFLAGS="$CFLAGS -I$abs_prefix/include"
LDFLAGS="$LDFLAGS -L$abs_prefix/lib"
export PKG_CONFIG="pkg-config --define-prefix"
export PKG_CONFIG_LIBDIR="$abs_prefix/lib/pkgconfig"
fi
# Use the same variable name as conda-build
if [ $(uname) = "Darwin" ]; then
export CPU_COUNT=$(sysctl -n hw.ncpu)
else
export CPU_COUNT=$(nproc)
fi

Android/android.py (new executable file, 202 lines)

@@ -0,0 +1,202 @@
#!/usr/bin/env python3
import argparse
import os
import re
import shutil
import subprocess
import sys
import sysconfig
from os.path import relpath
from pathlib import Path
SCRIPT_NAME = Path(__file__).name
CHECKOUT = Path(__file__).resolve().parent.parent
CROSS_BUILD_DIR = CHECKOUT / "cross-build"
def delete_if_exists(path):
if path.exists():
print(f"Deleting {path} ...")
shutil.rmtree(path)
def subdir(name, *, clean=None):
path = CROSS_BUILD_DIR / name
if clean:
delete_if_exists(path)
if not path.exists():
if clean is None:
sys.exit(
f"{path} does not exist. Create it by running the appropriate "
f"`configure` subcommand of {SCRIPT_NAME}.")
else:
path.mkdir(parents=True)
return path
def run(command, *, host=None, **kwargs):
env = os.environ.copy()
if host:
env_script = CHECKOUT / "Android/android-env.sh"
env_output = subprocess.run(
f"set -eu; "
f"HOST={host}; "
f"PREFIX={subdir(host)}/prefix; "
f". {env_script}; "
f"export",
check=True, shell=True, text=True, stdout=subprocess.PIPE
).stdout
for line in env_output.splitlines():
# We don't require every line to match, as there may be some other
# output from installing the NDK.
if match := re.search(
"^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
):
key, value = match[2], match[3]
if env.get(key) != value:
print(line)
env[key] = value
if env == os.environ:
raise ValueError(f"Found no variables in {env_script.name} output:\n"
+ env_output)
print(">", " ".join(map(str, command)))
try:
subprocess.run(command, check=True, env=env, **kwargs)
except subprocess.CalledProcessError as e:
sys.exit(e)
def build_python_path():
"""The path to the build Python binary."""
build_dir = subdir("build")
binary = build_dir / "python"
if not binary.is_file():
binary = binary.with_suffix(".exe")
if not binary.is_file():
raise FileNotFoundError("Unable to find `python(.exe)` in "
f"{build_dir}")
return binary
def configure_build_python(context):
os.chdir(subdir("build", clean=context.clean))
command = [relpath(CHECKOUT / "configure")]
if context.args:
command.extend(context.args)
run(command)
def make_build_python(context):
os.chdir(subdir("build"))
run(["make", "-j", str(os.cpu_count())])
def unpack_deps(host):
deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
for name_ver in ["bzip2-1.0.8-1", "libffi-3.4.4-2", "openssl-3.0.13-1",
"sqlite-3.45.1-0", "xz-5.4.6-0"]:
filename = f"{name_ver}-{host}.tar.gz"
run(["wget", f"{deps_url}/{name_ver}/{filename}"])
run(["tar", "-xf", filename])
os.remove(filename)
def configure_host_python(context):
host_dir = subdir(context.host, clean=context.clean)
prefix_dir = host_dir / "prefix"
if not prefix_dir.exists():
prefix_dir.mkdir()
os.chdir(prefix_dir)
unpack_deps(context.host)
build_dir = host_dir / "build"
build_dir.mkdir(exist_ok=True)
os.chdir(build_dir)
command = [
# Basic cross-compiling configuration
relpath(CHECKOUT / "configure"),
f"--host={context.host}",
f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}",
f"--with-build-python={build_python_path()}",
"--without-ensurepip",
# Android always uses a shared libpython.
"--enable-shared",
"--without-static-libpython",
# Dependent libraries. The others are found using pkg-config: see
# android-env.sh.
f"--with-openssl={prefix_dir}",
]
if context.args:
command.extend(context.args)
run(command, host=context.host)
def make_host_python(context):
host_dir = subdir(context.host)
os.chdir(host_dir / "build")
run(["make", "-j", str(os.cpu_count())], host=context.host)
run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host)
def build_all(context):
steps = [configure_build_python, make_build_python, configure_host_python,
make_host_python]
for step in steps:
step(context)
def clean_all(context):
delete_if_exists(CROSS_BUILD_DIR)
def main():
parser = argparse.ArgumentParser()
subcommands = parser.add_subparsers(dest="subcommand")
build = subcommands.add_parser("build", help="Build everything")
configure_build = subcommands.add_parser("configure-build",
help="Run `configure` for the "
"build Python")
make_build = subcommands.add_parser("make-build",
help="Run `make` for the build Python")
configure_host = subcommands.add_parser("configure-host",
help="Run `configure` for Android")
make_host = subcommands.add_parser("make-host",
help="Run `make` for Android")
clean = subcommands.add_parser("clean", help="Delete files and directories "
"created by this script")
for subcommand in build, configure_build, configure_host:
subcommand.add_argument(
"--clean", action="store_true", default=False, dest="clean",
help="Delete any relevant directories before building")
for subcommand in build, configure_host, make_host:
subcommand.add_argument(
"host", metavar="HOST",
choices=["aarch64-linux-android", "x86_64-linux-android"],
help="Host triplet: choices=[%(choices)s]")
for subcommand in build, configure_build, configure_host:
subcommand.add_argument("args", nargs="*",
help="Extra arguments to pass to `configure`")
context = parser.parse_args()
dispatch = {"configure-build": configure_build_python,
"make-build": make_build_python,
"configure-host": configure_host_python,
"make-host": make_host_python,
"build": build_all,
"clean": clean_all}
dispatch[context.subcommand](context)
if __name__ == "__main__":
main()


@@ -163,6 +163,7 @@ venv:
 echo "venv already exists."; \
 echo "To recreate it, remove it first with \`make clean-venv'."; \
 else \
+echo "Creating venv in $(VENVDIR)"; \
 $(PYTHON) -m venv $(VENVDIR); \
 $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \
 $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \


@@ -22,6 +22,10 @@ have a suggestion on how to fix it, include that as well.
 You can also open a discussion item on our
 `Documentation Discourse forum <https://discuss.python.org/c/documentation/26>`_.
+If you find a bug in the theme (HTML / CSS / JavaScript) of the
+documentation, please submit a bug report on the `python-doc-theme bug
+tracker <https://github.com/python/python-docs-theme>`_.
 If you're short on time, you can also email documentation bug reports to
 docs@python.org (behavioral bugs can be sent to python-list@python.org).
 'docs@' is a mailing list run by volunteers; your request will be noticed,


@@ -29,7 +29,7 @@ without intermediate copying.
 Python provides such a facility at the C level in the form of the :ref:`buffer
 protocol <bufferobjects>`. This protocol has two sides:
-.. index:: single: PyBufferProcs
+.. index:: single: PyBufferProcs (C type)
 - on the producer side, a type can export a "buffer interface" which allows
 objects of that type to expose information about their underlying buffer.
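For context, the consumer side of the buffer protocol mentioned in this hunk reduces to a `PyObject_GetBuffer()` / `PyBuffer_Release()` pair. A minimal sketch follows; the helper name is hypothetical and the code is not part of the patch:

```c
#include <Python.h>

/* Hypothetical helper: return the byte length of any buffer-exporting object. */
static Py_ssize_t
exported_length(PyObject *obj)
{
    Py_buffer view;
    /* Ask the producer for a simple, contiguous view of its data. */
    if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0) {
        return -1;              /* exception set by the producer */
    }
    Py_ssize_t len = view.len;  /* total size of the exported buffer in bytes */
    PyBuffer_Release(&view);    /* consumers must always release the view */
    return len;
}
```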


@@ -191,10 +191,10 @@ called with a non-bytes parameter.
 .. c:function:: int _PyBytes_Resize(PyObject **bytes, Py_ssize_t newsize)
-A way to resize a bytes object even though it is "immutable". Only use this
-to build up a brand new bytes object; don't use this if the bytes may already
-be known in other parts of the code. It is an error to call this function if
-the refcount on the input bytes object is not one. Pass the address of an
+Resize a bytes object. *newsize* will be the new length of the bytes object.
+You can think of it as creating a new bytes object and destroying the old
+one, only more efficiently.
+Pass the address of an
 existing bytes object as an lvalue (it may be written into), and the new size
 desired. On success, *\*bytes* holds the resized bytes object and ``0`` is
 returned; the address in *\*bytes* may differ from its input value. If the
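For context, a sketch of the build-then-resize pattern this entry describes; the helper and its sizes are hypothetical and the code is not part of the patch:

```c
#include <Python.h>
#include <string.h>

/* Hypothetical helper: over-allocate a bytes object, fill it, then shrink it
 * to the number of bytes actually used. */
static PyObject *
make_payload(const char *src, Py_ssize_t used, Py_ssize_t capacity)
{
    PyObject *bytes = PyBytes_FromStringAndSize(NULL, capacity);
    if (bytes == NULL) {
        return NULL;
    }
    memcpy(PyBytes_AS_STRING(bytes), src, (size_t)used);
    /* _PyBytes_Resize() may move the object: on success *bytes can point to a
     * different object and 0 is returned; on failure *bytes is set to NULL
     * and -1 is returned. */
    if (_PyBytes_Resize(&bytes, used) < 0) {
        return NULL;
    }
    return bytes;
}
```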


@@ -22,16 +22,27 @@ bound into a function.
 .. c:var:: PyTypeObject PyCode_Type
 This is an instance of :c:type:`PyTypeObject` representing the Python
-:class:`code` type.
+:ref:`code object <code-objects>`.
 .. c:function:: int PyCode_Check(PyObject *co)
-Return true if *co* is a :class:`code` object. This function always succeeds.
+Return true if *co* is a :ref:`code object <code-objects>`.
+This function always succeeds.
-.. c:function:: int PyCode_GetNumFree(PyCodeObject *co)
+.. c:function:: Py_ssize_t PyCode_GetNumFree(PyCodeObject *co)
-Return the number of free variables in *co*.
+Return the number of free variables in a code object.
+.. c:function:: int PyUnstable_Code_GetFirstFree(PyCodeObject *co)
+Return the position of the first free variable in a code object.
+.. versionchanged:: 3.13
+Renamed from ``PyCode_GetFirstFree`` as part of :ref:`unstable-c-api`.
+The old name is deprecated, but will remain available until the
+signature changes again.
 .. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, PyObject *qualname, int firstlineno, PyObject *linetable, PyObject *exceptiontable)
@@ -48,7 +59,7 @@ bound into a function.
 .. versionchanged:: 3.11
 Added ``qualname`` and ``exceptiontable`` parameters.
-.. index:: single: PyCode_New
+.. index:: single: PyCode_New (C function)
 .. versionchanged:: 3.12
@@ -61,7 +72,7 @@ bound into a function.
 Similar to :c:func:`PyUnstable_Code_New`, but with an extra "posonlyargcount" for positional-only arguments.
 The same caveats that apply to ``PyUnstable_Code_New`` also apply to this function.
-.. index:: single: PyCode_NewWithPosOnlyArgs
+.. index:: single: PyCode_NewWithPosOnlyArgs (C function)
 .. versionadded:: 3.8 as ``PyCode_NewWithPosOnlyArgs``
@@ -220,7 +231,7 @@ may change without deprecation warnings.
 *free* will be called on non-``NULL`` data stored under the new index.
 Use :c:func:`Py_DecRef` when storing :c:type:`PyObject`.
-.. index:: single: _PyEval_RequestCodeExtraIndex
+.. index:: single: _PyEval_RequestCodeExtraIndex (C function)
 .. versionadded:: 3.6 as ``_PyEval_RequestCodeExtraIndex``
@@ -238,7 +249,7 @@ may change without deprecation warnings.
 If no data was set under the index, set *extra* to ``NULL`` and return
 0 without setting an exception.
-.. index:: single: _PyCode_GetExtra
+.. index:: single: _PyCode_GetExtra (C function)
 .. versionadded:: 3.6 as ``_PyCode_GetExtra``
@@ -253,7 +264,7 @@ may change without deprecation warnings.
 Set the extra data stored under the given index to *extra*.
 Return 0 on success. Set an exception and return -1 on failure.
-.. index:: single: _PyCode_SetExtra
+.. index:: single: _PyCode_SetExtra (C function)
 .. versionadded:: 3.6 as ``_PyCode_SetExtra``

View file

@ -117,11 +117,29 @@ Complex Numbers as Python Objects
Return the real part of *op* as a C :c:expr:`double`. Return the real part of *op* as a C :c:expr:`double`.
If *op* is not a Python complex number object but has a
:meth:`~object.__complex__` method, this method will first be called to
convert *op* to a Python complex number object. If :meth:`!__complex__` is
not defined then it falls back to calling :c:func:`PyFloat_AsDouble` and
returns its result. Upon failure, this method returns ``-1.0``, so one
should call :c:func:`PyErr_Occurred` to check for errors.
.. versionchanged:: 3.13
Use :meth:`~object.__complex__` if available.
.. c:function:: double PyComplex_ImagAsDouble(PyObject *op) .. c:function:: double PyComplex_ImagAsDouble(PyObject *op)
Return the imaginary part of *op* as a C :c:expr:`double`. Return the imaginary part of *op* as a C :c:expr:`double`.
If *op* is not a Python complex number object but has a
:meth:`~object.__complex__` method, this method will first be called to
convert *op* to a Python complex number object. If :meth:`!__complex__` is
not defined then it falls back to calling :c:func:`PyFloat_AsDouble` and
returns ``0.0`` on success. Upon failure, this method returns ``-1.0``, so
one should call :c:func:`PyErr_Occurred` to check for errors.
.. versionchanged:: 3.13
Use :meth:`~object.__complex__` if available.
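Since ``-1.0`` is also a legitimate component value, callers have to disambiguate with :c:func:`PyErr_Occurred`; a brief sketch of the pattern (added here for illustration)::

    double real = PyComplex_RealAsDouble(op);
    if (real == -1.0 && PyErr_Occurred()) {
        return NULL;   /* conversion failed, exception already set */
    }
    double imag = PyComplex_ImagAsDouble(op);
    if (imag == -1.0 && PyErr_Occurred()) {
        return NULL;
    }
    /* real and imag now hold the two components of op */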
.. c:function:: Py_complex PyComplex_AsCComplex(PyObject *op) .. c:function:: Py_complex PyComplex_AsCComplex(PyObject *op)

View file

@ -6,6 +6,8 @@ Context Variables Objects
------------------------- -------------------------
.. _contextvarsobjects_pointertype_change: .. _contextvarsobjects_pointertype_change:
.. versionadded:: 3.7
.. versionchanged:: 3.7.1 .. versionchanged:: 3.7.1
.. note:: .. note::
@ -24,8 +26,6 @@ Context Variables Objects
See :issue:`34762` for more details. See :issue:`34762` for more details.
.. versionadded:: 3.7
This section details the public C API for the :mod:`contextvars` module. This section details the public C API for the :mod:`contextvars` module.
.. c:type:: PyContext .. c:type:: PyContext

View file

@ -48,6 +48,42 @@ The return value (*rv*) for these functions should be interpreted as follows:
The following functions provide locale-independent string to number conversions. The following functions provide locale-independent string to number conversions.
.. c:function:: unsigned long PyOS_strtoul(const char *str, char **ptr, int base)
Convert the initial part of the string in ``str`` to an :c:expr:`unsigned
long` value according to the given ``base``, which must be between ``2`` and
``36`` inclusive, or be the special value ``0``.
Leading white space and case of characters are ignored. If ``base`` is zero
it looks for a leading ``0b``, ``0o`` or ``0x`` to tell which base. If
these are absent it defaults to ``10``. Base must be 0 or between 2 and 36
(inclusive). If ``ptr`` is non-``NULL`` it will contain a pointer to the
end of the scan.
If the converted value falls out of the range of the corresponding return type,
a range error occurs (:c:data:`errno` is set to :c:macro:`!ERANGE`) and
:c:macro:`!ULONG_MAX` is returned. If no conversion can be performed, ``0``
is returned.
See also the Unix man page :manpage:`strtoul(3)`.
.. versionadded:: 3.2
.. c:function:: long PyOS_strtol(const char *str, char **ptr, int base)
Convert the initial part of the string in ``str`` to a :c:expr:`long` value
according to the given ``base``, which must be between ``2`` and ``36``
inclusive, or be the special value ``0``.
Same as :c:func:`PyOS_strtoul`, but return a :c:expr:`long` value instead
and :c:macro:`LONG_MAX` on overflows.
See also the Unix man page :manpage:`strtol(3)`.
.. versionadded:: 3.2
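A hedged usage sketch (not from the commit) of the usual ``errno`` protocol around :c:func:`PyOS_strtol`::

    #include <errno.h>

    char *end = NULL;
    errno = 0;
    long value = PyOS_strtol(str, &end, 0);  /* base 0: accept 0b/0o/0x prefixes */
    if (errno == ERANGE) {
        /* the input was out of range for a long */
    }
    else if (end == str) {
        /* no digits were consumed: not a number */
    }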
.. c:function:: double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) .. c:function:: double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception)
Convert a string ``s`` to a :c:expr:`double`, raising a Python Convert a string ``s`` to a :c:expr:`double`, raising a Python

View file

@ -174,6 +174,26 @@ Dictionary Objects
.. versionadded:: 3.4 .. versionadded:: 3.4
.. c:function:: int PyDict_SetDefaultRef(PyObject *p, PyObject *key, PyObject *default_value, PyObject **result)
Inserts *default_value* into the dictionary *p* with a key of *key* if the
key is not already present in the dictionary. If *result* is not ``NULL``,
then *\*result* is set to a :term:`strong reference` to either
*default_value*, if the key was not present, or the existing value, if *key*
was already present in the dictionary.
Returns ``1`` if the key was present and *default_value* was not inserted,
or ``0`` if the key was not present and *default_value* was inserted.
On failure, returns ``-1``, sets an exception, and sets ``*result``
to ``NULL``.
For clarity: if you have a strong reference to *default_value* before
calling this function, then after it returns, you hold a strong reference
to both *default_value* and *\*result* (if it's not ``NULL``).
These may refer to the same object: in that case you hold two separate
references to it.
.. versionadded:: 3.13
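An illustrative sketch (assumed usage, not taken from the patch) that ensures a key maps to ``0`` and then works with whichever value ends up stored::

    static int
    ensure_zero(PyObject *counts, PyObject *key)
    {
        PyObject *zero = PyLong_FromLong(0);
        if (zero == NULL) {
            return -1;
        }
        PyObject *value;                 /* receives a strong reference */
        int rc = PyDict_SetDefaultRef(counts, key, zero, &value);
        Py_DECREF(zero);                 /* *value stays valid: it is a separate reference */
        if (rc < 0) {
            return -1;                   /* exception set; value is NULL */
        }
        /* rc == 1: the key already existed; rc == 0: zero was just inserted. */
        Py_DECREF(value);
        return 0;
    }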
.. c:function:: int PyDict_Pop(PyObject *p, PyObject *key, PyObject **result) .. c:function:: int PyDict_Pop(PyObject *p, PyObject *key, PyObject **result)
Remove *key* from dictionary *p* and optionally return the removed value. Remove *key* from dictionary *p* and optionally return the removed value.

View file

@ -105,7 +105,7 @@ Printing and clearing
parameters help format the warning message; they have the same meaning and parameters help format the warning message; they have the same meaning and
values as in :c:func:`PyUnicode_FromFormat`. values as in :c:func:`PyUnicode_FromFormat`.
``PyErr_WriteUnraisable(obj)`` is roughly equivalent to ``PyErr_WriteUnraisable(obj)`` is roughly equivalent to
``PyErr_FormatUnraisable("Exception ignored in: %R, obj)``. ``PyErr_FormatUnraisable("Exception ignored in: %R", obj)``.
If *format* is ``NULL``, only the traceback is printed. If *format* is ``NULL``, only the traceback is printed.
.. versionadded:: 3.13 .. versionadded:: 3.13
@ -180,7 +180,7 @@ For convenience, some of these functions will always return a
.. c:function:: PyObject* PyErr_SetFromErrno(PyObject *type) .. c:function:: PyObject* PyErr_SetFromErrno(PyObject *type)
.. index:: single: strerror() .. index:: single: strerror (C function)
This is a convenience function to raise an exception when a C library function This is a convenience function to raise an exception when a C library function
has returned an error and set the C variable :c:data:`errno`. It constructs a has returned an error and set the C variable :c:data:`errno`. It constructs a
@ -221,13 +221,14 @@ For convenience, some of these functions will always return a
.. c:function:: PyObject* PyErr_SetFromWindowsErr(int ierr) .. c:function:: PyObject* PyErr_SetFromWindowsErr(int ierr)
This is a convenience function to raise :exc:`WindowsError`. If called with This is a convenience function to raise :exc:`OSError`. If called with
*ierr* of ``0``, the error code returned by a call to :c:func:`!GetLastError` *ierr* of ``0``, the error code returned by a call to :c:func:`!GetLastError`
is used instead. It calls the Win32 function :c:func:`!FormatMessage` to retrieve is used instead. It calls the Win32 function :c:func:`!FormatMessage` to retrieve
the Windows description of error code given by *ierr* or :c:func:`!GetLastError`, the Windows description of error code given by *ierr* or :c:func:`!GetLastError`,
then it constructs a tuple object whose first item is the *ierr* value and whose then it constructs an :exc:`OSError` object with the :attr:`~OSError.winerror`
second item is the corresponding error message (gotten from attribute set to the error code, the :attr:`~OSError.strerror` attribute
:c:func:`!FormatMessage`), and then calls ``PyErr_SetObject(PyExc_WindowsError, set to the corresponding error message (gotten from
:c:func:`!FormatMessage`), and then calls ``PyErr_SetObject(PyExc_OSError,
object)``. This function always returns ``NULL``. object)``. This function always returns ``NULL``.
.. availability:: Windows. .. availability:: Windows.
@ -396,7 +397,7 @@ an error value).
.. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...) .. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...)
Function similar to :c:func:`PyErr_WarnFormat`, but *category* is Function similar to :c:func:`PyErr_WarnFormat`, but *category* is
:exc:`ResourceWarning` and it passes *source* to :func:`warnings.WarningMessage`. :exc:`ResourceWarning` and it passes *source* to :class:`!warnings.WarningMessage`.
.. versionadded:: 3.6 .. versionadded:: 3.6
@ -635,7 +636,7 @@ Signal Handling
.. index:: .. index::
pair: module; signal pair: module; signal
single: SIGINT single: SIGINT (C macro)
single: KeyboardInterrupt (built-in exception) single: KeyboardInterrupt (built-in exception)
This function interacts with Python's signal handling. This function interacts with Python's signal handling.
@ -666,7 +667,7 @@ Signal Handling
.. index:: .. index::
pair: module; signal pair: module; signal
single: SIGINT single: SIGINT (C macro)
single: KeyboardInterrupt (built-in exception) single: KeyboardInterrupt (built-in exception)
Simulate the effect of a :c:macro:`!SIGINT` signal arriving. Simulate the effect of a :c:macro:`!SIGINT` signal arriving.
@ -732,7 +733,7 @@ Exception Classes
This creates a class object derived from :exc:`Exception` (accessible in C as This creates a class object derived from :exc:`Exception` (accessible in C as
:c:data:`PyExc_Exception`). :c:data:`PyExc_Exception`).
The :attr:`__module__` attribute of the new class is set to the first part (up The :attr:`!__module__` attribute of the new class is set to the first part (up
to the last dot) of the *name* argument, and the class name is set to the last to the last dot) of the *name* argument, and the class name is set to the last
part (after the last dot). The *base* argument can be used to specify alternate part (after the last dot). The *base* argument can be used to specify alternate
base classes; it can either be only one class or a tuple of classes. The *dict* base classes; it can either be only one class or a tuple of classes. The *dict*
@ -904,8 +905,8 @@ because the :ref:`call protocol <call>` takes care of recursion handling.
Marks a point where a recursive C-level call is about to be performed. Marks a point where a recursive C-level call is about to be performed.
If :c:macro:`USE_STACKCHECK` is defined, this function checks if the OS If :c:macro:`!USE_STACKCHECK` is defined, this function checks if the OS
stack overflowed using :c:func:`PyOS_CheckStack`. In this is the case, it stack overflowed using :c:func:`PyOS_CheckStack`. If this is the case, it
sets a :exc:`MemoryError` and returns a nonzero value. sets a :exc:`MemoryError` and returns a nonzero value.
The function then checks if the recursion limit is reached. If this is the The function then checks if the recursion limit is reached. If this is the
@ -968,59 +969,59 @@ All standard Python exceptions are available as global variables whose names are
the variables: the variables:
.. index:: .. index::
single: PyExc_BaseException single: PyExc_BaseException (C var)
single: PyExc_Exception single: PyExc_Exception (C var)
single: PyExc_ArithmeticError single: PyExc_ArithmeticError (C var)
single: PyExc_AssertionError single: PyExc_AssertionError (C var)
single: PyExc_AttributeError single: PyExc_AttributeError (C var)
single: PyExc_BlockingIOError single: PyExc_BlockingIOError (C var)
single: PyExc_BrokenPipeError single: PyExc_BrokenPipeError (C var)
single: PyExc_BufferError single: PyExc_BufferError (C var)
single: PyExc_ChildProcessError single: PyExc_ChildProcessError (C var)
single: PyExc_ConnectionAbortedError single: PyExc_ConnectionAbortedError (C var)
single: PyExc_ConnectionError single: PyExc_ConnectionError (C var)
single: PyExc_ConnectionRefusedError single: PyExc_ConnectionRefusedError (C var)
single: PyExc_ConnectionResetError single: PyExc_ConnectionResetError (C var)
single: PyExc_EOFError single: PyExc_EOFError (C var)
single: PyExc_FileExistsError single: PyExc_FileExistsError (C var)
single: PyExc_FileNotFoundError single: PyExc_FileNotFoundError (C var)
single: PyExc_FloatingPointError single: PyExc_FloatingPointError (C var)
single: PyExc_GeneratorExit single: PyExc_GeneratorExit (C var)
single: PyExc_ImportError single: PyExc_ImportError (C var)
single: PyExc_IndentationError single: PyExc_IndentationError (C var)
single: PyExc_IndexError single: PyExc_IndexError (C var)
single: PyExc_InterruptedError single: PyExc_InterruptedError (C var)
single: PyExc_IsADirectoryError single: PyExc_IsADirectoryError (C var)
single: PyExc_KeyError single: PyExc_KeyError (C var)
single: PyExc_KeyboardInterrupt single: PyExc_KeyboardInterrupt (C var)
single: PyExc_LookupError single: PyExc_LookupError (C var)
single: PyExc_MemoryError single: PyExc_MemoryError (C var)
single: PyExc_ModuleNotFoundError single: PyExc_ModuleNotFoundError (C var)
single: PyExc_NameError single: PyExc_NameError (C var)
single: PyExc_NotADirectoryError single: PyExc_NotADirectoryError (C var)
single: PyExc_NotImplementedError single: PyExc_NotImplementedError (C var)
single: PyExc_OSError single: PyExc_OSError (C var)
single: PyExc_OverflowError single: PyExc_OverflowError (C var)
single: PyExc_PermissionError single: PyExc_PermissionError (C var)
single: PyExc_ProcessLookupError single: PyExc_ProcessLookupError (C var)
single: PyExc_RecursionError single: PyExc_RecursionError (C var)
single: PyExc_ReferenceError single: PyExc_ReferenceError (C var)
single: PyExc_RuntimeError single: PyExc_RuntimeError (C var)
single: PyExc_StopAsyncIteration single: PyExc_StopAsyncIteration (C var)
single: PyExc_StopIteration single: PyExc_StopIteration (C var)
single: PyExc_SyntaxError single: PyExc_SyntaxError (C var)
single: PyExc_SystemError single: PyExc_SystemError (C var)
single: PyExc_SystemExit single: PyExc_SystemExit (C var)
single: PyExc_TabError single: PyExc_TabError (C var)
single: PyExc_TimeoutError single: PyExc_TimeoutError (C var)
single: PyExc_TypeError single: PyExc_TypeError (C var)
single: PyExc_UnboundLocalError single: PyExc_UnboundLocalError (C var)
single: PyExc_UnicodeDecodeError single: PyExc_UnicodeDecodeError (C var)
single: PyExc_UnicodeEncodeError single: PyExc_UnicodeEncodeError (C var)
single: PyExc_UnicodeError single: PyExc_UnicodeError (C var)
single: PyExc_UnicodeTranslateError single: PyExc_UnicodeTranslateError (C var)
single: PyExc_ValueError single: PyExc_ValueError (C var)
single: PyExc_ZeroDivisionError single: PyExc_ZeroDivisionError (C var)
+-----------------------------------------+---------------------------------+----------+ +-----------------------------------------+---------------------------------+----------+
| C Name | Python Name | Notes | | C Name | Python Name | Notes |
@ -1151,18 +1152,18 @@ the variables:
These are compatibility aliases to :c:data:`PyExc_OSError`: These are compatibility aliases to :c:data:`PyExc_OSError`:
.. index:: .. index::
single: PyExc_EnvironmentError single: PyExc_EnvironmentError (C var)
single: PyExc_IOError single: PyExc_IOError (C var)
single: PyExc_WindowsError single: PyExc_WindowsError (C var)
+-------------------------------------+----------+ +-------------------------------------+----------+
| C Name | Notes | | C Name | Notes |
+=====================================+==========+ +=====================================+==========+
| :c:data:`PyExc_EnvironmentError` | | | :c:data:`!PyExc_EnvironmentError` | |
+-------------------------------------+----------+ +-------------------------------------+----------+
| :c:data:`PyExc_IOError` | | | :c:data:`!PyExc_IOError` | |
+-------------------------------------+----------+ +-------------------------------------+----------+
| :c:data:`PyExc_WindowsError` | [2]_ | | :c:data:`!PyExc_WindowsError` | [2]_ |
+-------------------------------------+----------+ +-------------------------------------+----------+
.. versionchanged:: 3.3 .. versionchanged:: 3.3
@ -1188,17 +1189,17 @@ names are ``PyExc_`` followed by the Python exception name. These have the type
the variables: the variables:
.. index:: .. index::
single: PyExc_Warning single: PyExc_Warning (C var)
single: PyExc_BytesWarning single: PyExc_BytesWarning (C var)
single: PyExc_DeprecationWarning single: PyExc_DeprecationWarning (C var)
single: PyExc_FutureWarning single: PyExc_FutureWarning (C var)
single: PyExc_ImportWarning single: PyExc_ImportWarning (C var)
single: PyExc_PendingDeprecationWarning single: PyExc_PendingDeprecationWarning (C var)
single: PyExc_ResourceWarning single: PyExc_ResourceWarning (C var)
single: PyExc_RuntimeWarning single: PyExc_RuntimeWarning (C var)
single: PyExc_SyntaxWarning single: PyExc_SyntaxWarning (C var)
single: PyExc_UnicodeWarning single: PyExc_UnicodeWarning (C var)
single: PyExc_UserWarning single: PyExc_UserWarning (C var)
+------------------------------------------+---------------------------------+----------+ +------------------------------------------+---------------------------------+----------+
| C Name | Python Name | Notes | | C Name | Python Name | Notes |

View file

@ -65,8 +65,14 @@ the :mod:`io` APIs instead.
Overrides the normal behavior of :func:`io.open_code` to pass its parameter Overrides the normal behavior of :func:`io.open_code` to pass its parameter
through the provided handler. through the provided handler.
The handler is a function of type :c:expr:`PyObject *(\*)(PyObject *path, The *handler* is a function of type:
void *userData)`, where *path* is guaranteed to be :c:type:`PyUnicodeObject`.
.. c:namespace:: NULL
.. c:type:: PyObject * (*Py_OpenCodeHookFunction)(PyObject *, void *)
Equivalent of :c:expr:`PyObject *(\*)(PyObject *path,
void *userData)`, where *path* is guaranteed to be
:c:type:`PyUnicodeObject`.
The *userData* pointer is passed into the hook function. Since hook The *userData* pointer is passed into the hook function. Since hook
functions may be called from different runtimes, this pointer should not functions may be called from different runtimes, this pointer should not
@ -90,7 +96,7 @@ the :mod:`io` APIs instead.
.. c:function:: int PyFile_WriteObject(PyObject *obj, PyObject *p, int flags) .. c:function:: int PyFile_WriteObject(PyObject *obj, PyObject *p, int flags)
.. index:: single: Py_PRINT_RAW .. index:: single: Py_PRINT_RAW (C macro)
Write object *obj* to file object *p*. The only supported flag for *flags* is Write object *obj* to file object *p*. The only supported flag for *flags* is
:c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written :c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written

View file

@ -83,10 +83,15 @@ rules:
.. versionadded:: 3.12 .. versionadded:: 3.12
.. c:function:: TYPE* PyObject_GC_Resize(TYPE, PyVarObject *op, Py_ssize_t newsize) .. c:macro:: PyObject_GC_Resize(TYPE, op, newsize)
Resize an object allocated by :c:macro:`PyObject_NewVar`. Returns the Resize an object allocated by :c:macro:`PyObject_NewVar`.
resized object or ``NULL`` on failure. *op* must not be tracked by the collector yet. Returns the resized object, cast to ``TYPE*`` (where ``TYPE`` may be any C type),
or ``NULL`` on failure.
*op* must be of type :c:expr:`PyVarObject *`
and must not be tracked by the collector yet.
*newsize* must be of type :c:type:`Py_ssize_t`.
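A rough sketch (using a hypothetical ``MyObject``/``MyObject_Type`` with a variable-length tail of object pointers; not part of the commit)::

    MyObject *obj = PyObject_GC_NewVar(MyObject, &MyObject_Type, 4);
    if (obj == NULL) {
        return NULL;
    }
    /* ...fill the first 4 slots, before PyObject_GC_Track() is called... */
    MyObject *bigger = PyObject_GC_Resize(MyObject, obj, 8);
    if (bigger == NULL) {
        Py_DECREF(obj);        /* assumed: the original block is untouched on failure */
        return NULL;
    }
    obj = bigger;
    for (Py_ssize_t i = 4; i < 8; i++) {
        obj->items[i] = NULL;  /* hypothetical tail slots start out empty */
    }
    PyObject_GC_Track((PyObject *)obj);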
.. c:function:: void PyObject_GC_Track(PyObject *op) .. c:function:: void PyObject_GC_Track(PyObject *op)

View file

@ -3,7 +3,7 @@
PyHash API PyHash API
---------- ----------
See also the :c:member:`PyTypeObject.tp_hash` member. See also the :c:member:`PyTypeObject.tp_hash` member and :ref:`numeric-hash`.
.. c:type:: Py_hash_t .. c:type:: Py_hash_t
@ -17,6 +17,29 @@ See also the :c:member:`PyTypeObject.tp_hash` member.
.. versionadded:: 3.2 .. versionadded:: 3.2
.. c:macro:: PyHASH_MODULUS
The `Mersenne prime <https://en.wikipedia.org/wiki/Mersenne_prime>`_ ``P = 2**n - 1``, used for the numeric hash scheme.
.. versionadded:: 3.13
.. c:macro:: PyHASH_BITS
The exponent ``n`` of ``P`` in :c:macro:`PyHASH_MODULUS`.
.. versionadded:: 3.13
.. c:macro:: PyHASH_INF
The hash value returned for a positive infinity.
.. versionadded:: 3.13
.. c:macro:: PyHASH_IMAG
The multiplier used for the imaginary part of a complex number.
.. versionadded:: 3.13
.. c:type:: PyHash_FuncDef .. c:type:: PyHash_FuncDef
@ -59,3 +82,14 @@ See also the :c:member:`PyTypeObject.tp_hash` member.
The function cannot fail: it cannot return ``-1``. The function cannot fail: it cannot return ``-1``.
.. versionadded:: 3.13 .. versionadded:: 3.13
.. c:function:: Py_hash_t PyObject_GenericHash(PyObject *obj)
Generic hashing function that is meant to be put into a type
object's ``tp_hash`` slot.
Its result only depends on the object's identity.
.. impl-detail::
In CPython, it is equivalent to :c:func:`Py_HashPointer`.
.. versionadded:: 3.13
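For example, a static type could opt into identity hashing like this (a minimal sketch with an assumed type name, not from the patch)::

    static PyTypeObject IdentityHashed_Type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "example.IdentityHashed",
        .tp_basicsize = sizeof(PyObject),
        .tp_flags = Py_TPFLAGS_DEFAULT,
        .tp_hash = PyObject_GenericHash,   /* hash tracks object identity */
    };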

View file

@ -13,20 +13,8 @@ Importing Modules
single: __all__ (package variable) single: __all__ (package variable)
single: modules (in module sys) single: modules (in module sys)
This is a simplified interface to :c:func:`PyImport_ImportModuleEx` below, This is a wrapper around :c:func:`PyImport_Import()` which takes a
leaving the *globals* and *locals* arguments set to ``NULL`` and *level* set :c:expr:`const char *` as an argument instead of a :c:expr:`PyObject *`.
to 0. When the *name*
argument contains a dot (when it specifies a submodule of a package), the
*fromlist* argument is set to the list ``['*']`` so that the return value is the
named module rather than the top-level package containing it as would otherwise
be the case. (Unfortunately, this has an additional side effect when *name* in
fact specifies a subpackage instead of a submodule: the submodules specified in
the package's ``__all__`` variable are loaded.) Return a new reference to the
imported module, or ``NULL`` with an exception set on failure. A failing
import of a module doesn't leave the module in :data:`sys.modules`.
This function always uses absolute imports.
.. c:function:: PyObject* PyImport_ImportModuleNoBlock(const char *name) .. c:function:: PyObject* PyImport_ImportModuleNoBlock(const char *name)
@ -320,7 +308,7 @@ Importing Modules
The module name, as an ASCII encoded string. The module name, as an ASCII encoded string.
.. c: member:: PyObject* (*initfunc)(void) .. c:member:: PyObject* (*initfunc)(void)
Initialization function for a module built into the interpreter. Initialization function for a module built into the interpreter.

View file

@ -29,6 +29,8 @@ The following functions can be safely called before Python is initialized:
* :c:func:`PyMem_SetAllocator` * :c:func:`PyMem_SetAllocator`
* :c:func:`PyMem_SetupDebugHooks` * :c:func:`PyMem_SetupDebugHooks`
* :c:func:`PyObject_SetArenaAllocator` * :c:func:`PyObject_SetArenaAllocator`
* :c:func:`Py_SetProgramName`
* :c:func:`Py_SetPythonHome`
* :c:func:`PySys_ResetWarnOptions` * :c:func:`PySys_ResetWarnOptions`
* Informative functions: * Informative functions:
@ -59,7 +61,7 @@ The following functions can be safely called before Python is initialized:
:c:func:`Py_Initialize`: :c:func:`Py_EncodeLocale`, :c:func:`Py_GetPath`, :c:func:`Py_Initialize`: :c:func:`Py_EncodeLocale`, :c:func:`Py_GetPath`,
:c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`, :c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`,
:c:func:`Py_GetProgramFullPath`, :c:func:`Py_GetPythonHome`, :c:func:`Py_GetProgramFullPath`, :c:func:`Py_GetPythonHome`,
and :c:func:`Py_GetProgramName`. :c:func:`Py_GetProgramName` and :c:func:`PyEval_InitThreads`.
.. _global-conf-vars: .. _global-conf-vars:
@ -326,13 +328,14 @@ Initializing and finalizing the interpreter
.. c:function:: void Py_Initialize() .. c:function:: void Py_Initialize()
.. index:: .. index::
single: PyEval_InitThreads()
single: modules (in module sys) single: modules (in module sys)
single: path (in module sys) single: path (in module sys)
pair: module; builtins pair: module; builtins
pair: module; __main__ pair: module; __main__
pair: module; sys pair: module; sys
triple: module; search; path triple: module; search; path
single: Py_FinalizeEx() single: Py_FinalizeEx (C function)
Initialize the Python interpreter. In an application embedding Python, Initialize the Python interpreter. In an application embedding Python,
this should be called before using any other Python/C API functions; see this should be called before using any other Python/C API functions; see
@ -425,6 +428,34 @@ Process-wide parameters
======================= =======================
.. c:function:: void Py_SetProgramName(const wchar_t *name)
.. index::
single: Py_Initialize()
single: main()
single: Py_GetPath()
This API is kept for backward compatibility: setting
:c:member:`PyConfig.program_name` should be used instead, see :ref:`Python
Initialization Configuration <init-config>`.
This function should be called before :c:func:`Py_Initialize` is called for
the first time, if it is called at all. It tells the interpreter the value
of the ``argv[0]`` argument to the :c:func:`main` function of the program
(converted to wide characters).
This is used by :c:func:`Py_GetPath` and some other functions below to find
the Python run-time libraries relative to the interpreter executable. The
default value is ``'python'``. The argument should point to a
zero-terminated wide character string in static storage whose contents will not
change for the duration of the program's execution. No code in the Python
interpreter will change the contents of this storage.
Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a
:c:expr:`wchar_*` string.
.. deprecated:: 3.11
.. c:function:: wchar_t* Py_GetProgramName() .. c:function:: wchar_t* Py_GetProgramName()
Return the program name set with :c:member:`PyConfig.program_name`, or the default. Return the program name set with :c:member:`PyConfig.program_name`, or the default.
@ -626,6 +657,106 @@ Process-wide parameters
``sys.version``. ``sys.version``.
.. c:function:: void PySys_SetArgvEx(int argc, wchar_t **argv, int updatepath)
.. index::
single: main()
single: Py_FatalError()
single: argv (in module sys)
This API is kept for backward compatibility: setting
:c:member:`PyConfig.argv`, :c:member:`PyConfig.parse_argv` and
:c:member:`PyConfig.safe_path` should be used instead, see :ref:`Python
Initialization Configuration <init-config>`.
Set :data:`sys.argv` based on *argc* and *argv*. These parameters are
similar to those passed to the program's :c:func:`main` function with the
difference that the first entry should refer to the script file to be
executed rather than the executable hosting the Python interpreter. If there
isn't a script that will be run, the first entry in *argv* can be an empty
string. If this function fails to initialize :data:`sys.argv`, a fatal
condition is signalled using :c:func:`Py_FatalError`.
If *updatepath* is zero, this is all the function does. If *updatepath*
is non-zero, the function also modifies :data:`sys.path` according to the
following algorithm:
- If the name of an existing script is passed in ``argv[0]``, the absolute
path of the directory where the script is located is prepended to
:data:`sys.path`.
- Otherwise (that is, if *argc* is ``0`` or ``argv[0]`` doesn't point
to an existing file name), an empty string is prepended to
:data:`sys.path`, which is the same as prepending the current working
directory (``"."``).
Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a
:c:expr:`wchar_*` string.
See also :c:member:`PyConfig.orig_argv` and :c:member:`PyConfig.argv`
members of the :ref:`Python Initialization Configuration <init-config>`.
.. note::
It is recommended that applications embedding the Python interpreter
for purposes other than executing a single script pass ``0`` as *updatepath*,
and update :data:`sys.path` themselves if desired.
See :cve:`2008-5983`.
On versions before 3.1.3, you can achieve the same effect by manually
popping the first :data:`sys.path` element after having called
:c:func:`PySys_SetArgv`, for example using::
PyRun_SimpleString("import sys; sys.path.pop(0)\n");
.. versionadded:: 3.1.3
.. XXX impl. doesn't seem consistent in allowing ``0``/``NULL`` for the params;
check w/ Guido.
.. deprecated:: 3.11
.. c:function:: void PySys_SetArgv(int argc, wchar_t **argv)
This API is kept for backward compatibility: setting
:c:member:`PyConfig.argv` and :c:member:`PyConfig.parse_argv` should be used
instead, see :ref:`Python Initialization Configuration <init-config>`.
This function works like :c:func:`PySys_SetArgvEx` with *updatepath* set
to ``1`` unless the :program:`python` interpreter was started with the
:option:`-I`.
Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a
:c:expr:`wchar_*` string.
See also :c:member:`PyConfig.orig_argv` and :c:member:`PyConfig.argv`
members of the :ref:`Python Initialization Configuration <init-config>`.
.. versionchanged:: 3.4 The *updatepath* value depends on :option:`-I`.
.. deprecated:: 3.11
.. c:function:: void Py_SetPythonHome(const wchar_t *home)
This API is kept for backward compatibility: setting
:c:member:`PyConfig.home` should be used instead, see :ref:`Python
Initialization Configuration <init-config>`.
Set the default "home" directory, that is, the location of the standard
Python libraries. See :envvar:`PYTHONHOME` for the meaning of the
argument string.
The argument should point to a zero-terminated character string in static
storage whose contents will not change for the duration of the program's
execution. No code in the Python interpreter will change the contents of
this storage.
Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a
:c:expr:`wchar_*` string.
.. deprecated:: 3.11
.. c:function:: wchar_t* Py_GetPythonHome() .. c:function:: wchar_t* Py_GetPythonHome()
Return the default "home", that is, the value set by Return the default "home", that is, the value set by
@ -661,7 +792,7 @@ operations could cause problems in a multi-threaded program: for example, when
two threads simultaneously increment the reference count of the same object, the two threads simultaneously increment the reference count of the same object, the
reference count could end up being incremented only once instead of twice. reference count could end up being incremented only once instead of twice.
.. index:: single: setswitchinterval() (in module sys) .. index:: single: setswitchinterval (in module sys)
Therefore, the rule exists that only the thread that has acquired the Therefore, the rule exists that only the thread that has acquired the
:term:`GIL` may operate on Python objects or call Python/C API functions. :term:`GIL` may operate on Python objects or call Python/C API functions.
@ -671,8 +802,7 @@ released around potentially blocking I/O operations like reading or writing
a file, so that other Python threads can run in the meantime. a file, so that other Python threads can run in the meantime.
.. index:: .. index::
single: PyThreadState single: PyThreadState (C type)
single: PyThreadState
The Python interpreter keeps some thread-specific bookkeeping information The Python interpreter keeps some thread-specific bookkeeping information
inside a data structure called :c:type:`PyThreadState`. There's also one inside a data structure called :c:type:`PyThreadState`. There's also one
@ -698,8 +828,8 @@ This is so common that a pair of macros exists to simplify it::
Py_END_ALLOW_THREADS Py_END_ALLOW_THREADS
.. index:: .. index::
single: Py_BEGIN_ALLOW_THREADS single: Py_BEGIN_ALLOW_THREADS (C macro)
single: Py_END_ALLOW_THREADS single: Py_END_ALLOW_THREADS (C macro)
The :c:macro:`Py_BEGIN_ALLOW_THREADS` macro opens a new block and declares a The :c:macro:`Py_BEGIN_ALLOW_THREADS` macro opens a new block and declares a
hidden local variable; the :c:macro:`Py_END_ALLOW_THREADS` macro closes the hidden local variable; the :c:macro:`Py_END_ALLOW_THREADS` macro closes the
@ -714,8 +844,8 @@ The block above expands to the following code::
PyEval_RestoreThread(_save); PyEval_RestoreThread(_save);
.. index:: .. index::
single: PyEval_RestoreThread() single: PyEval_RestoreThread (C function)
single: PyEval_SaveThread() single: PyEval_SaveThread (C function)
Here is how these functions work: the global interpreter lock is used to protect the pointer to the Here is how these functions work: the global interpreter lock is used to protect the pointer to the
current thread state. When releasing the lock and saving the thread state, current thread state. When releasing the lock and saving the thread state,
@ -842,6 +972,33 @@ code, or when embedding the Python interpreter:
This thread's interpreter state. This thread's interpreter state.
.. c:function:: void PyEval_InitThreads()
.. index::
single: PyEval_AcquireThread()
single: PyEval_ReleaseThread()
single: PyEval_SaveThread()
single: PyEval_RestoreThread()
Deprecated function which does nothing.
In Python 3.6 and older, this function created the GIL if it didn't exist.
.. versionchanged:: 3.9
The function now does nothing.
.. versionchanged:: 3.7
This function is now called by :c:func:`Py_Initialize()`, so you don't
have to call it yourself anymore.
.. versionchanged:: 3.2
This function cannot be called before :c:func:`Py_Initialize()` anymore.
.. deprecated:: 3.9
.. index:: pair: module; _thread
.. c:function:: PyThreadState* PyEval_SaveThread() .. c:function:: PyThreadState* PyEval_SaveThread()
Release the global interpreter lock (if it has been created) and reset the Release the global interpreter lock (if it has been created) and reset the
@ -1399,8 +1556,8 @@ function. You can create and destroy them using the following functions:
may be stored internally on the :c:type:`PyInterpreterState`. may be stored internally on the :c:type:`PyInterpreterState`.
.. index:: .. index::
single: Py_FinalizeEx() single: Py_FinalizeEx (C function)
single: Py_Initialize() single: Py_Initialize (C function)
Extension modules are shared between (sub-)interpreters as follows: Extension modules are shared between (sub-)interpreters as follows:
@ -1428,7 +1585,7 @@ function. You can create and destroy them using the following functions:
As with multi-phase initialization, this means that only C-level static As with multi-phase initialization, this means that only C-level static
and global variables are shared between these modules. and global variables are shared between these modules.
.. index:: single: close() (in module os) .. index:: single: close (in module os)
.. c:function:: PyThreadState* Py_NewInterpreter(void) .. c:function:: PyThreadState* Py_NewInterpreter(void)
@ -1451,7 +1608,7 @@ function. You can create and destroy them using the following functions:
.. c:function:: void Py_EndInterpreter(PyThreadState *tstate) .. c:function:: void Py_EndInterpreter(PyThreadState *tstate)
.. index:: single: Py_FinalizeEx() .. index:: single: Py_FinalizeEx (C function)
Destroy the (sub-)interpreter represented by the given thread state. Destroy the (sub-)interpreter represented by the given thread state.
The given thread state must be the current thread state. See the The given thread state must be the current thread state. See the
@ -1543,8 +1700,6 @@ pointer and a void pointer argument.
.. c:function:: int Py_AddPendingCall(int (*func)(void *), void *arg) .. c:function:: int Py_AddPendingCall(int (*func)(void *), void *arg)
.. index:: single: Py_AddPendingCall()
Schedule a function to be called from the main interpreter thread. On Schedule a function to be called from the main interpreter thread. On
success, ``0`` is returned and *func* is queued for being called in the success, ``0`` is returned and *func* is queued for being called in the
main thread. On failure, ``-1`` is returned without setting any exception. main thread. On failure, ``-1`` is returned without setting any exception.
@ -1578,14 +1733,14 @@ pointer and a void pointer argument.
function is generally **not** suitable for calling Python code from function is generally **not** suitable for calling Python code from
arbitrary C threads. Instead, use the :ref:`PyGILState API<gilstate>`. arbitrary C threads. Instead, use the :ref:`PyGILState API<gilstate>`.
.. versionadded:: 3.1
.. versionchanged:: 3.9 .. versionchanged:: 3.9
If this function is called in a subinterpreter, the function *func* is If this function is called in a subinterpreter, the function *func* is
now scheduled to be called from the subinterpreter, rather than being now scheduled to be called from the subinterpreter, rather than being
called from the main interpreter. Each subinterpreter now has its own called from the main interpreter. Each subinterpreter now has its own
list of scheduled calls. list of scheduled calls.
.. versionadded:: 3.1
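A hedged illustration (not in the commit) of a pending call that merely sets a flag for the interpreter to act on later::

    static int wakeup_requested = 0;

    static int
    set_wakeup_flag(void *arg)
    {
        /* Called later in the (sub)interpreter that scheduled it, with the GIL held. */
        wakeup_requested = 1;
        return 0;          /* non-zero would mean "an exception has been set" */
    }

    /* From some C-level callback: */
    if (Py_AddPendingCall(set_wakeup_flag, NULL) < 0) {
        /* -1: the call could not be queued; no exception is set. */
    }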
.. _profiling: .. _profiling:
Profiling and Tracing Profiling and Tracing

View file

@ -148,7 +148,7 @@ complete listing.
worse performances (due to increased code size for example). The compiler is worse performances (due to increased code size for example). The compiler is
usually smarter than the developer for the cost/benefit analysis. usually smarter than the developer for the cost/benefit analysis.
If Python is :ref:`built in debug mode <debug-build>` (if the ``Py_DEBUG`` If Python is :ref:`built in debug mode <debug-build>` (if the :c:macro:`Py_DEBUG`
macro is defined), the :c:macro:`Py_ALWAYS_INLINE` macro does nothing. macro is defined), the :c:macro:`Py_ALWAYS_INLINE` macro does nothing.
It must be specified before the function return type. Usage:: It must be specified before the function return type. Usage::
@ -325,8 +325,8 @@ objects that reference each other here; for now, the solution
is "don't do that.") is "don't do that.")
.. index:: .. index::
single: Py_INCREF() single: Py_INCREF (C function)
single: Py_DECREF() single: Py_DECREF (C function)
Reference counts are always manipulated explicitly. The normal way is Reference counts are always manipulated explicitly. The normal way is
to use the macro :c:func:`Py_INCREF` to take a new reference to an to use the macro :c:func:`Py_INCREF` to take a new reference to an
@ -401,8 +401,8 @@ function, that function assumes that it now owns that reference, and you are not
responsible for it any longer. responsible for it any longer.
.. index:: .. index::
single: PyList_SetItem() single: PyList_SetItem (C function)
single: PyTuple_SetItem() single: PyTuple_SetItem (C function)
Few functions steal references; the two notable exceptions are Few functions steal references; the two notable exceptions are
:c:func:`PyList_SetItem` and :c:func:`PyTuple_SetItem`, which steal a reference :c:func:`PyList_SetItem` and :c:func:`PyTuple_SetItem`, which steal a reference
@ -491,8 +491,8 @@ using :c:func:`PySequence_GetItem` (which happens to take exactly the same
arguments), you do own a reference to the returned object. arguments), you do own a reference to the returned object.
.. index:: .. index::
single: PyList_GetItem() single: PyList_GetItem (C function)
single: PySequence_GetItem() single: PySequence_GetItem (C function)
Here is an example of how you could write a function that computes the sum of Here is an example of how you could write a function that computes the sum of
the items in a list of integers; once using :c:func:`PyList_GetItem`, and once the items in a list of integers; once using :c:func:`PyList_GetItem`, and once
@ -587,7 +587,7 @@ caller, then to the caller's caller, and so on, until they reach the top-level
interpreter, where they are reported to the user accompanied by a stack interpreter, where they are reported to the user accompanied by a stack
traceback. traceback.
.. index:: single: PyErr_Occurred() .. index:: single: PyErr_Occurred (C function)
For C programmers, however, error checking always has to be explicit. All For C programmers, however, error checking always has to be explicit. All
functions in the Python/C API can raise exceptions, unless an explicit claim is functions in the Python/C API can raise exceptions, unless an explicit claim is
@ -601,8 +601,8 @@ ambiguous return value, and require explicit testing for errors with
:c:func:`PyErr_Occurred`. These exceptions are always explicitly documented. :c:func:`PyErr_Occurred`. These exceptions are always explicitly documented.
.. index:: .. index::
single: PyErr_SetString() single: PyErr_SetString (C function)
single: PyErr_Clear() single: PyErr_Clear (C function)
Exception state is maintained in per-thread storage (this is equivalent to Exception state is maintained in per-thread storage (this is equivalent to
using global storage in an unthreaded application). A thread can be in one of using global storage in an unthreaded application). A thread can be in one of
@ -624,7 +624,7 @@ an exception is being passed on between C functions until it reaches the Python
bytecode interpreter's main loop, which takes care of transferring it to bytecode interpreter's main loop, which takes care of transferring it to
``sys.exc_info()`` and friends. ``sys.exc_info()`` and friends.
.. index:: single: exc_info() (in module sys) .. index:: single: exc_info (in module sys)
Note that starting with Python 1.5, the preferred, thread-safe way to access the Note that starting with Python 1.5, the preferred, thread-safe way to access the
exception state from Python code is to call the function :func:`sys.exc_info`, exception state from Python code is to call the function :func:`sys.exc_info`,
@ -709,9 +709,9 @@ Here is the corresponding C code, in all its glory::
.. index:: single: incr_item() .. index:: single: incr_item()
.. index:: .. index::
single: PyErr_ExceptionMatches() single: PyErr_ExceptionMatches (C function)
single: PyErr_Clear() single: PyErr_Clear (C function)
single: Py_XDECREF() single: Py_XDECREF (C function)
This example represents an endorsed use of the ``goto`` statement in C! This example represents an endorsed use of the ``goto`` statement in C!
It illustrates the use of :c:func:`PyErr_ExceptionMatches` and It illustrates the use of :c:func:`PyErr_ExceptionMatches` and
@ -735,7 +735,7 @@ the finalization, of the Python interpreter. Most functionality of the
interpreter can only be used after the interpreter has been initialized. interpreter can only be used after the interpreter has been initialized.
.. index:: .. index::
single: Py_Initialize() single: Py_Initialize (C function)
pair: module; builtins pair: module; builtins
pair: module; __main__ pair: module; __main__
pair: module; sys pair: module; sys
@ -770,10 +770,10 @@ environment variable :envvar:`PYTHONHOME`, or insert additional directories in
front of the standard path by setting :envvar:`PYTHONPATH`. front of the standard path by setting :envvar:`PYTHONPATH`.
.. index:: .. index::
single: Py_GetPath() single: Py_GetPath (C function)
single: Py_GetPrefix() single: Py_GetPrefix (C function)
single: Py_GetExecPrefix() single: Py_GetExecPrefix (C function)
single: Py_GetProgramFullPath() single: Py_GetProgramFullPath (C function)
The embedding application can steer the search by setting The embedding application can steer the search by setting
:c:member:`PyConfig.program_name` *before* calling :c:member:`PyConfig.program_name` *before* calling
@ -784,7 +784,7 @@ control has to provide its own implementation of :c:func:`Py_GetPath`,
:c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`, and :c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`, and
:c:func:`Py_GetProgramFullPath` (all defined in :file:`Modules/getpath.c`). :c:func:`Py_GetProgramFullPath` (all defined in :file:`Modules/getpath.c`).
.. index:: single: Py_IsInitialized() .. index:: single: Py_IsInitialized (C function)
Sometimes, it is desirable to "uninitialize" Python. For instance, the Sometimes, it is desirable to "uninitialize" Python. For instance, the
application may want to start over (make another call to application may want to start over (make another call to
@ -812,12 +812,14 @@ available that support tracing of reference counts, debugging the memory
allocator, or low-level profiling of the main interpreter loop. Only the most allocator, or low-level profiling of the main interpreter loop. Only the most
frequently used builds will be described in the remainder of this section. frequently used builds will be described in the remainder of this section.
Compiling the interpreter with the :c:macro:`Py_DEBUG` macro defined produces .. c:macro:: Py_DEBUG
Compiling the interpreter with the :c:macro:`!Py_DEBUG` macro defined produces
what is generally meant by :ref:`a debug build of Python <debug-build>`. what is generally meant by :ref:`a debug build of Python <debug-build>`.
:c:macro:`Py_DEBUG` is enabled in the Unix build by adding :c:macro:`!Py_DEBUG` is enabled in the Unix build by adding
:option:`--with-pydebug` to the :file:`./configure` command. :option:`--with-pydebug` to the :file:`./configure` command.
It is also implied by the presence of the It is also implied by the presence of the
not-Python-specific :c:macro:`_DEBUG` macro. When :c:macro:`Py_DEBUG` is enabled not-Python-specific :c:macro:`!_DEBUG` macro. When :c:macro:`!Py_DEBUG` is enabled
in the Unix build, compiler optimization is disabled. in the Unix build, compiler optimization is disabled.
In addition to the reference count debugging described below, extra checks are In addition to the reference count debugging described below, extra checks are
@ -832,4 +834,3 @@ after every statement run by the interpreter.)
Please refer to :file:`Misc/SpecialBuilds.txt` in the Python source distribution Please refer to :file:`Misc/SpecialBuilds.txt` in the Python source distribution
for more detailed information. for more detailed information.

View file

@ -56,13 +56,21 @@ List Objects
Similar to :c:func:`PyList_Size`, but without error checking. Similar to :c:func:`PyList_Size`, but without error checking.
.. c:function:: PyObject* PyList_GetItem(PyObject *list, Py_ssize_t index) .. c:function:: PyObject* PyList_GetItemRef(PyObject *list, Py_ssize_t index)
Return the object at position *index* in the list pointed to by *list*. The Return the object at position *index* in the list pointed to by *list*. The
position must be non-negative; indexing from the end of the list is not position must be non-negative; indexing from the end of the list is not
supported. If *index* is out of bounds (<0 or >=len(list)), supported. If *index* is out of bounds (:code:`<0 or >=len(list)`),
return ``NULL`` and set an :exc:`IndexError` exception. return ``NULL`` and set an :exc:`IndexError` exception.
.. versionadded:: 3.13
.. c:function:: PyObject* PyList_GetItem(PyObject *list, Py_ssize_t index)
Like :c:func:`PyList_GetItemRef`, but returns a
:term:`borrowed reference` instead of a :term:`strong reference`.
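A short sketch of the strong-reference accessor (added here for illustration)::

    PyObject *item = PyList_GetItemRef(list, 0);
    if (item == NULL) {
        return NULL;            /* IndexError (or another error) already set */
    }
    /* item is a strong reference, so it stays valid even if the list mutates. */
    long v = PyLong_AsLong(item);
    Py_DECREF(item);
    if (v == -1 && PyErr_Occurred()) {
        return NULL;
    }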
.. c:function:: PyObject* PyList_GET_ITEM(PyObject *list, Py_ssize_t i) .. c:function:: PyObject* PyList_GET_ITEM(PyObject *list, Py_ssize_t i)

View file

@ -113,11 +113,37 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
retrieved from the resulting value using :c:func:`PyLong_AsVoidPtr`. retrieved from the resulting value using :c:func:`PyLong_AsVoidPtr`.
.. c:function:: PyObject* PyLong_FromNativeBytes(const void* buffer, size_t n_bytes, int flags)
Create a Python integer from the value contained in the first *n_bytes* of
*buffer*, interpreted as a two's-complement signed number.
*flags* are as for :c:func:`PyLong_AsNativeBytes`. Passing ``-1`` will select
the native endian that CPython was compiled with and assume that the
most-significant bit is a sign bit. Passing
``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` will produce the same result as calling
:c:func:`PyLong_FromUnsignedNativeBytes`. Other flags are ignored.
.. versionadded:: 3.13
.. c:function:: PyObject* PyLong_FromUnsignedNativeBytes(const void* buffer, size_t n_bytes, int flags)
Create a Python integer from the value contained in the first *n_bytes* of
*buffer*, interpreted as an unsigned number.
*flags* are as for :c:func:`PyLong_AsNativeBytes`. Passing ``-1`` will select
the native endian that CPython was compiled with and assume that the
most-significant bit is not a sign bit. Flags other than endian are ignored.
.. versionadded:: 3.13
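A hedged sketch of constructing integers from raw native buffers (assumed usage, requires ``<stdint.h>``; not part of the patch)::

    int64_t raw = -12345;
    /* Signed, native-endian interpretation of the bytes of 'raw'. */
    PyObject *n = PyLong_FromNativeBytes(&raw, sizeof(raw), -1);
    if (n == NULL) {
        return NULL;
    }

    uint16_t u = 0xFFFF;
    /* A different buffer, treated as unsigned: the result is 65535. */
    PyObject *m = PyLong_FromUnsignedNativeBytes(&u, sizeof(u), -1);
    if (m == NULL) {
        Py_DECREF(n);
        return NULL;
    }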
.. XXX alias PyLong_AS_LONG (for now) .. XXX alias PyLong_AS_LONG (for now)
.. c:function:: long PyLong_AsLong(PyObject *obj) .. c:function:: long PyLong_AsLong(PyObject *obj)
.. index:: .. index::
single: LONG_MAX single: LONG_MAX (C macro)
single: OverflowError (built-in exception) single: OverflowError (built-in exception)
Return a C :c:expr:`long` representation of *obj*. If *obj* is not an Return a C :c:expr:`long` representation of *obj*. If *obj* is not an
@ -210,7 +236,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong) .. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong)
.. index:: .. index::
single: PY_SSIZE_T_MAX single: PY_SSIZE_T_MAX (C macro)
single: OverflowError (built-in exception) single: OverflowError (built-in exception)
Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must
@ -225,7 +251,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong) .. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong)
.. index:: .. index::
single: ULONG_MAX single: ULONG_MAX (C macro)
single: OverflowError (built-in exception) single: OverflowError (built-in exception)
Return a C :c:expr:`unsigned long` representation of *pylong*. *pylong* Return a C :c:expr:`unsigned long` representation of *pylong*. *pylong*
@ -241,7 +267,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: size_t PyLong_AsSize_t(PyObject *pylong) .. c:function:: size_t PyLong_AsSize_t(PyObject *pylong)
.. index:: .. index::
single: SIZE_MAX single: SIZE_MAX (C macro)
single: OverflowError (built-in exception) single: OverflowError (built-in exception)
Return a C :c:type:`size_t` representation of *pylong*. *pylong* must be Return a C :c:type:`size_t` representation of *pylong*. *pylong* must be
@ -332,6 +358,142 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
Returns ``NULL`` on error. Use :c:func:`PyErr_Occurred` to disambiguate. Returns ``NULL`` on error. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: Py_ssize_t PyLong_AsNativeBytes(PyObject *pylong, void* buffer, Py_ssize_t n_bytes, int flags)
Copy the Python integer value *pylong* to a native *buffer* of size
*n_bytes*. The *flags* can be set to ``-1`` to behave similarly to a C cast,
or to values documented below to control the behavior.
Returns ``-1`` with an exception raised on error. This may happen if
*pylong* cannot be interpreted as an integer, or if *pylong* was negative
and the ``Py_ASNATIVEBYTES_REJECT_NEGATIVE`` flag was set.
Otherwise, returns the number of bytes required to store the value.
If this is equal to or less than *n_bytes*, the entire value was copied.
All *n_bytes* of the buffer are written: large buffers are padded with
zeroes.
If the returned value is greater than *n_bytes*, the value was
truncated: as many of the lowest bits of the value as could fit are written,
and the higher bits are ignored. This matches the typical behavior
of a C-style downcast.
.. note::
Overflow is not considered an error. If the returned value
is larger than *n_bytes*, most significant bits were discarded.
``0`` will never be returned.
Values are always copied as two's-complement.
Usage example::
int32_t value;
Py_ssize_t bytes = PyLong_AsNativeBytes(pylong, &value, sizeof(value), -1);
if (bytes < 0) {
// Failed. A Python exception was set with the reason.
return NULL;
}
else if (bytes <= (Py_ssize_t)sizeof(value)) {
// Success!
}
else {
// Overflow occurred, but 'value' contains the truncated
// lowest bits of pylong.
}
Passing zero to *n_bytes* will return the size of a buffer that would
be large enough to hold the value. This may be larger than technically
necessary, but not unreasonably so.
.. note::
Passing *n_bytes=0* to this function is not an accurate way to determine
the bit length of a value.
If *n_bytes=0*, *buffer* may be ``NULL``.
To get at the entire Python value of an unknown size, the function can be
called twice: first to determine the buffer size, then to fill it::
// Ask how much space we need.
Py_ssize_t expected = PyLong_AsNativeBytes(pylong, NULL, 0, -1);
if (expected < 0) {
// Failed. A Python exception was set with the reason.
return NULL;
}
assert(expected != 0); // Impossible per the API definition.
uint8_t *bignum = malloc(expected);
if (!bignum) {
PyErr_SetString(PyExc_MemoryError, "bignum malloc failed.");
return NULL;
}
// Safely get the entire value.
Py_ssize_t bytes = PyLong_AsNativeBytes(pylong, bignum, expected, -1);
if (bytes < 0) { // Exception has been set.
free(bignum);
return NULL;
}
else if (bytes > expected) { // This should not be possible.
PyErr_SetString(PyExc_RuntimeError,
"Unexpected bignum truncation after a size check.");
free(bignum);
return NULL;
}
// The expected success given the above pre-check.
// ... use bignum ...
free(bignum);
*flags* is either ``-1`` (``Py_ASNATIVEBYTES_DEFAULTS``) to select defaults
that behave most like a C cast, or a combination of the other flags in
the table below.
Note that ``-1`` cannot be combined with other flags.
Currently, ``-1`` corresponds to
``Py_ASNATIVEBYTES_NATIVE_ENDIAN | Py_ASNATIVEBYTES_UNSIGNED_BUFFER``.
============================================= ======
Flag Value
============================================= ======
.. c:macro:: Py_ASNATIVEBYTES_DEFAULTS ``-1``
.. c:macro:: Py_ASNATIVEBYTES_BIG_ENDIAN ``0``
.. c:macro:: Py_ASNATIVEBYTES_LITTLE_ENDIAN ``1``
.. c:macro:: Py_ASNATIVEBYTES_NATIVE_ENDIAN ``3``
.. c:macro:: Py_ASNATIVEBYTES_UNSIGNED_BUFFER ``4``
.. c:macro:: Py_ASNATIVEBYTES_REJECT_NEGATIVE ``8``
============================================= ======
Specifying ``Py_ASNATIVEBYTES_NATIVE_ENDIAN`` will override any other endian
flags. Passing ``2`` is reserved.
By default, a buffer large enough to include a sign bit will be requested.
For example, when converting 128 with *n_bytes=1*, the function will return
2 (or more) in order to store a zero sign bit.
If ``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` is specified, a zero sign bit
will be omitted from size calculations. This allows, for example, 128 to fit
in a single-byte buffer. If the destination buffer is later treated as
signed, a positive input value may become negative.
Note that the flag does not affect handling of negative values: for those,
space for a sign bit is always requested.
Specifying ``Py_ASNATIVEBYTES_REJECT_NEGATIVE`` causes an exception to be set
if *pylong* is negative. Without this flag, negative values will be copied
provided there is enough space for at least one sign bit, regardless of
whether ``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` was specified.
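The following is a minimal illustrative sketch (not part of the original
documentation) that combines these flags to read a non-negative value into a
native-endian unsigned 16-bit integer; it assumes *pylong* is a valid integer
object and that the surrounding C function returns ``NULL`` on error::
uint16_t out;
Py_ssize_t n = PyLong_AsNativeBytes(
    pylong, &out, sizeof(out),
    Py_ASNATIVEBYTES_NATIVE_ENDIAN
        | Py_ASNATIVEBYTES_UNSIGNED_BUFFER
        | Py_ASNATIVEBYTES_REJECT_NEGATIVE);
if (n < 0) {
    // Negative input or another error; an exception is set.
    return NULL;
}
if (n > (Py_ssize_t)sizeof(out)) {
    PyErr_SetString(PyExc_OverflowError, "value does not fit in 16 bits");
    return NULL;
}
// 'out' now holds the full value, in the range 0..65535.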
.. note::
With the default *flags* (``-1``, or *UNSIGNED_BUFFER* without
*REJECT_NEGATIVE*), multiple Python integers can map to a single value
without overflow. For example, both ``255`` and ``-1`` fit a single-byte
buffer and set all its bits.
This matches typical C cast behavior.
.. versionadded:: 3.13
.. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op) .. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op)
Return 1 if *op* is compact, 0 otherwise. Return 1 if *op* is compact, 0 otherwise.
@ -340,7 +502,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
a “fast path” for small integers. For compact values use a “fast path” for small integers. For compact values use
:c:func:`PyUnstable_Long_CompactValue`; for others fall back to a :c:func:`PyUnstable_Long_CompactValue`; for others fall back to a
:c:func:`PyLong_As* <PyLong_AsSize_t>` function or :c:func:`PyLong_As* <PyLong_AsSize_t>` function or
:c:func:`calling <PyObject_CallMethod>` :meth:`int.to_bytes`. :c:func:`PyLong_AsNativeBytes`.
The speedup is expected to be negligible for most users. The speedup is expected to be negligible for most users.

View file

@ -41,10 +41,10 @@ buffers is performed on demand by the Python memory manager through the Python/C
API functions listed in this document. API functions listed in this document.
.. index:: .. index::
single: malloc() single: malloc (C function)
single: calloc() single: calloc (C function)
single: realloc() single: realloc (C function)
single: free() single: free (C function)
To avoid memory corruption, extension writers should never try to operate on To avoid memory corruption, extension writers should never try to operate on
Python objects with the functions exported by the C library: :c:func:`malloc`, Python objects with the functions exported by the C library: :c:func:`malloc`,
@ -267,14 +267,14 @@ The following type-oriented macros are provided for convenience. Note that
.. c:macro:: PyMem_New(TYPE, n) .. c:macro:: PyMem_New(TYPE, n)
Same as :c:func:`PyMem_Malloc`, but allocates ``(n * sizeof(TYPE))`` bytes of Same as :c:func:`PyMem_Malloc`, but allocates ``(n * sizeof(TYPE))`` bytes of
memory. Returns a pointer cast to :c:expr:`TYPE*`. The memory will not have memory. Returns a pointer cast to ``TYPE*``. The memory will not have
been initialized in any way. been initialized in any way.
.. c:macro:: PyMem_Resize(p, TYPE, n) .. c:macro:: PyMem_Resize(p, TYPE, n)
Same as :c:func:`PyMem_Realloc`, but the memory block is resized to ``(n * Same as :c:func:`PyMem_Realloc`, but the memory block is resized to ``(n *
sizeof(TYPE))`` bytes. Returns a pointer cast to :c:expr:`TYPE*`. On return, sizeof(TYPE))`` bytes. Returns a pointer cast to ``TYPE*``. On return,
*p* will be a pointer to the new memory area, or ``NULL`` in the event of *p* will be a pointer to the new memory area, or ``NULL`` in the event of
failure. failure.

View file

@ -20,6 +20,17 @@ any other object.
read/write, otherwise it may be either read-only or read/write at the read/write, otherwise it may be either read-only or read/write at the
discretion of the exporter. discretion of the exporter.
.. c:macro:: PyBUF_READ
Flag to request a readonly buffer.
.. c:macro:: PyBUF_WRITE
Flag to request a writable buffer.
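As a hypothetical illustration (not part of the original text), these flags
are typically passed to :c:func:`PyMemoryView_FromMemory`, documented below;
the static buffer here is invented and must outlive the memoryview::
static char data[] = "hello, world";
PyObject *view = PyMemoryView_FromMemory(data, sizeof(data) - 1, PyBUF_READ);
if (view == NULL) {
    return NULL;   // exception set
}
// Python code that receives 'view' sees a read-only memoryview of 12 bytes.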
.. c:function:: PyObject *PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags) .. c:function:: PyObject *PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags)
Create a memoryview object using *mem* as the underlying buffer. Create a memoryview object using *mem* as the underlying buffer.
@ -41,6 +52,8 @@ any other object.
original memory. Otherwise, a copy is made and the memoryview points to a original memory. Otherwise, a copy is made and the memoryview points to a
new bytes object. new bytes object.
*buffertype* can be one of :c:macro:`PyBUF_READ` or :c:macro:`PyBUF_WRITE`.
.. c:function:: int PyMemoryView_Check(PyObject *obj) .. c:function:: int PyMemoryView_Check(PyObject *obj)

View file

@ -6,6 +6,55 @@ Object Protocol
=============== ===============
.. c:function:: PyObject* Py_GetConstant(unsigned int constant_id)
Get a :term:`strong reference` to a constant.
Set an exception and return ``NULL`` if *constant_id* is invalid.
*constant_id* must be one of these constant identifiers:
.. c:namespace:: NULL
======================================== ===== =========================
Constant Identifier Value Returned object
======================================== ===== =========================
.. c:macro:: Py_CONSTANT_NONE ``0`` :py:data:`None`
.. c:macro:: Py_CONSTANT_FALSE ``1`` :py:data:`False`
.. c:macro:: Py_CONSTANT_TRUE ``2`` :py:data:`True`
.. c:macro:: Py_CONSTANT_ELLIPSIS ``3`` :py:data:`Ellipsis`
.. c:macro:: Py_CONSTANT_NOT_IMPLEMENTED ``4`` :py:data:`NotImplemented`
.. c:macro:: Py_CONSTANT_ZERO ``5`` ``0``
.. c:macro:: Py_CONSTANT_ONE ``6`` ``1``
.. c:macro:: Py_CONSTANT_EMPTY_STR ``7`` ``''``
.. c:macro:: Py_CONSTANT_EMPTY_BYTES ``8`` ``b''``
.. c:macro:: Py_CONSTANT_EMPTY_TUPLE ``9`` ``()``
======================================== ===== =========================
Numeric values are only given for projects which cannot use the constant
identifiers.
.. versionadded:: 3.13
.. impl-detail::
In CPython, all of these constants are :term:`immortal`.
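A brief usage sketch (illustrative only; it assumes the surrounding C
function returns ``NULL`` on error)::
PyObject *empty = Py_GetConstant(Py_CONSTANT_EMPTY_TUPLE);
if (empty == NULL) {
    return NULL;   // invalid identifier; an exception is set
}
// ... use the empty tuple ...
Py_DECREF(empty);  // Py_GetConstant returned a strong reference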
.. c:function:: PyObject* Py_GetConstantBorrowed(unsigned int constant_id)
Similar to :c:func:`Py_GetConstant`, but return a :term:`borrowed
reference`.
This function is primarily intended for backwards compatibility:
using :c:func:`Py_GetConstant` is recommended for new code.
The reference is borrowed from the interpreter, and is valid until the
interpreter finalization.
.. versionadded:: 3.13
.. c:var:: PyObject* Py_NotImplemented .. c:var:: PyObject* Py_NotImplemented
The ``NotImplemented`` singleton, used to signal that an operation is The ``NotImplemented`` singleton, used to signal that an operation is
@ -19,6 +68,14 @@ Object Protocol
to NotImplemented and return it). to NotImplemented and return it).
.. c:macro:: Py_PRINT_RAW
Flag to be used with multiple functions that print the object (like
:c:func:`PyObject_Print` and :c:func:`PyFile_WriteObject`).
If passed, these functions would use the :func:`str` of the object
instead of the :func:`repr`.
.. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags) .. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags)
Print an object *o*, on file *fp*. Returns ``-1`` on error. The flags argument Print an object *o*, on file *fp*. Returns ``-1`` on error. The flags argument
@ -47,9 +104,8 @@ Object Protocol
.. c:function:: int PyObject_HasAttr(PyObject *o, PyObject *attr_name) .. c:function:: int PyObject_HasAttr(PyObject *o, PyObject *attr_name)
Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. This Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise.
is equivalent to the Python expression ``hasattr(o, attr_name)``. This function This function always succeeds.
always succeeds.
.. note:: .. note::
@ -222,12 +278,8 @@ Object Protocol
.. c:function:: int PyObject_RichCompareBool(PyObject *o1, PyObject *o2, int opid) .. c:function:: int PyObject_RichCompareBool(PyObject *o1, PyObject *o2, int opid)
Compare the values of *o1* and *o2* using the operation specified by *opid*, Compare the values of *o1* and *o2* using the operation specified by *opid*,
which must be one of :c:macro:`Py_LT`, :c:macro:`Py_LE`, :c:macro:`Py_EQ`, like :c:func:`PyObject_RichCompare`, but returns ``-1`` on error, ``0`` if
:c:macro:`Py_NE`, :c:macro:`Py_GT`, or :c:macro:`Py_GE`, corresponding to ``<``, the result is false, ``1`` otherwise.
``<=``, ``==``, ``!=``, ``>``, or ``>=`` respectively. Returns ``-1`` on error,
``0`` if the result is false, ``1`` otherwise. This is the equivalent of the
Python expression ``o1 op o2``, where ``op`` is the operator corresponding to
*opid*.
.. note:: .. note::
If *o1* and *o2* are the same object, :c:func:`PyObject_RichCompareBool` If *o1* and *o2* are the same object, :c:func:`PyObject_RichCompareBool`

View file

@ -23,12 +23,12 @@ of Python objects.
Use the :c:func:`Py_SET_REFCNT()` function to set an object reference count. Use the :c:func:`Py_SET_REFCNT()` function to set an object reference count.
.. versionchanged:: 3.11
The parameter type is no longer :c:expr:`const PyObject*`.
.. versionchanged:: 3.10 .. versionchanged:: 3.10
:c:func:`Py_REFCNT()` is changed to the inline static function. :c:func:`Py_REFCNT()` is changed to the inline static function.
.. versionchanged:: 3.11
The parameter type is no longer :c:expr:`const PyObject*`.
.. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt) .. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt)

View file

@ -16,7 +16,7 @@ CPython's Application Binary Interface (ABI) is forward- and
backwards-compatible across a minor release (if these are compiled the same backwards-compatible across a minor release (if these are compiled the same
way; see :ref:`stable-abi-platform` below). way; see :ref:`stable-abi-platform` below).
So, code compiled for Python 3.10.0 will work on 3.10.8 and vice versa, So, code compiled for Python 3.10.0 will work on 3.10.8 and vice versa,
but will need to be compiled separately for 3.9.x and 3.10.x. but will need to be compiled separately for 3.9.x and 3.11.x.
There are two tiers of C API with different stability expectations: There are two tiers of C API with different stability expectations:

View file

@ -187,23 +187,23 @@ Implementing functions and methods
PyObject *kwargs); PyObject *kwargs);
.. c:type:: _PyCFunctionFast .. c:type:: PyCFunctionFast
Type of the functions used to implement Python callables in C Type of the functions used to implement Python callables in C
with signature :c:macro:`METH_FASTCALL`. with signature :c:macro:`METH_FASTCALL`.
The function signature is:: The function signature is::
PyObject *_PyCFunctionFast(PyObject *self, PyObject *PyCFunctionFast(PyObject *self,
PyObject *const *args, PyObject *const *args,
Py_ssize_t nargs); Py_ssize_t nargs);
.. c:type:: _PyCFunctionFastWithKeywords .. c:type:: PyCFunctionFastWithKeywords
Type of the functions used to implement Python callables in C Type of the functions used to implement Python callables in C
with signature :ref:`METH_FASTCALL | METH_KEYWORDS <METH_FASTCALL-METH_KEYWORDS>`. with signature :ref:`METH_FASTCALL | METH_KEYWORDS <METH_FASTCALL-METH_KEYWORDS>`.
The function signature is:: The function signature is::
PyObject *_PyCFunctionFastWithKeywords(PyObject *self, PyObject *PyCFunctionFastWithKeywords(PyObject *self,
PyObject *const *args, PyObject *const *args,
Py_ssize_t nargs, Py_ssize_t nargs,
PyObject *kwnames); PyObject *kwnames);
@ -290,7 +290,7 @@ There are these calling conventions:
.. c:macro:: METH_FASTCALL .. c:macro:: METH_FASTCALL
Fast calling convention supporting only positional arguments. Fast calling convention supporting only positional arguments.
The methods have the type :c:type:`_PyCFunctionFast`. The methods have the type :c:type:`PyCFunctionFast`.
The first parameter is *self*, the second parameter is a C array The first parameter is *self*, the second parameter is a C array
of :c:expr:`PyObject*` values indicating the arguments and the third of :c:expr:`PyObject*` values indicating the arguments and the third
parameter is the number of arguments (the length of the array). parameter is the number of arguments (the length of the array).
@ -306,7 +306,7 @@ There are these calling conventions:
:c:expr:`METH_FASTCALL | METH_KEYWORDS` :c:expr:`METH_FASTCALL | METH_KEYWORDS`
Extension of :c:macro:`METH_FASTCALL` supporting also keyword arguments, Extension of :c:macro:`METH_FASTCALL` supporting also keyword arguments,
with methods of type :c:type:`_PyCFunctionFastWithKeywords`. with methods of type :c:type:`PyCFunctionFastWithKeywords`.
Keyword arguments are passed the same way as in the Keyword arguments are passed the same way as in the
:ref:`vectorcall protocol <vectorcall>`: :ref:`vectorcall protocol <vectorcall>`:
there is an additional fourth :c:expr:`PyObject*` parameter there is an additional fourth :c:expr:`PyObject*` parameter
@ -399,6 +399,40 @@ definition with the same method name.
slot. This is helpful because calls to PyCFunctions are optimized more slot. This is helpful because calls to PyCFunctions are optimized more
than wrapper object calls. than wrapper object calls.
.. c:function:: PyObject * PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *cls)
Turn *ml* into a Python :term:`callable` object.
The caller must ensure that *ml* outlives the :term:`callable`.
Typically, *ml* is defined as a static variable.
The *self* parameter will be passed as the *self* argument
to the C function in ``ml->ml_meth`` when invoked.
*self* can be ``NULL``.
The :term:`callable` object's ``__module__`` attribute
can be set from the given *module* argument.
*module* should be a Python string,
which will be used as the name of the module the function is defined in.
If unavailable, it can be set to :const:`None` or ``NULL``.
.. seealso:: :attr:`function.__module__`
The *cls* parameter will be passed as the *defining_class*
argument to the C function.
Must be set if :c:macro:`METH_METHOD` is set on ``ml->ml_flags``.
.. versionadded:: 3.9
.. c:function:: PyObject * PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module)
Equivalent to ``PyCMethod_New(ml, self, module, NULL)``.
.. c:function:: PyObject * PyCFunction_New(PyMethodDef *ml, PyObject *self)
Equivalent to ``PyCMethod_New(ml, self, NULL, NULL)``.
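A hedged sketch of how these constructors might be used; the names
``demo_impl`` and ``demo_def`` are invented for illustration::
static PyObject *
demo_impl(PyObject *self, PyObject *args)
{
    Py_RETURN_NONE;
}
static PyMethodDef demo_def = {
    "demo", demo_impl, METH_VARARGS, "Do nothing and return None."
};
// Inside some C function, for example during module initialization:
PyObject *callable = PyCFunction_New(&demo_def, NULL);
if (callable == NULL) {
    return NULL;   // exception set
}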
Accessing attributes of extension types Accessing attributes of extension types
--------------------------------------- ---------------------------------------
@ -517,19 +551,19 @@ The following flags can be used with :c:member:`PyMemberDef.flags`:
from ``PyObject``. from ``PyObject``.
Can only be used as part of :c:member:`Py_tp_members <PyTypeObject.tp_members>` Can only be used as part of :c:member:`Py_tp_members <PyTypeObject.tp_members>`
:c:type:`slot <PyTypeSlot>` when creating a class using negative :c:type:`slot <PyType_Slot>` when creating a class using negative
:c:member:`~PyType_Spec.basicsize`. :c:member:`~PyType_Spec.basicsize`.
It is mandatory in that case. It is mandatory in that case.
This flag is only used in :c:type:`PyTypeSlot`. This flag is only used in :c:type:`PyType_Slot`.
When setting :c:member:`~PyTypeObject.tp_members` during When setting :c:member:`~PyTypeObject.tp_members` during
class creation, Python clears it and sets class creation, Python clears it and sets
:c:member:`PyMemberDef.offset` to the offset from the ``PyObject`` struct. :c:member:`PyMemberDef.offset` to the offset from the ``PyObject`` struct.
.. index:: .. index::
single: READ_RESTRICTED single: READ_RESTRICTED (C macro)
single: WRITE_RESTRICTED single: WRITE_RESTRICTED (C macro)
single: RESTRICTED single: RESTRICTED (C macro)
.. versionchanged:: 3.10 .. versionchanged:: 3.10
@ -540,7 +574,7 @@ The following flags can be used with :c:member:`PyMemberDef.flags`:
:c:macro:`Py_AUDIT_READ`; :c:macro:`!WRITE_RESTRICTED` does nothing. :c:macro:`Py_AUDIT_READ`; :c:macro:`!WRITE_RESTRICTED` does nothing.
.. index:: .. index::
single: READONLY single: READONLY (C macro)
.. versionchanged:: 3.12 .. versionchanged:: 3.12
@ -603,24 +637,24 @@ Macro name C type Python type
Reading a ``NULL`` pointer raises :py:exc:`AttributeError`. Reading a ``NULL`` pointer raises :py:exc:`AttributeError`.
.. index:: .. index::
single: T_BYTE single: T_BYTE (C macro)
single: T_SHORT single: T_SHORT (C macro)
single: T_INT single: T_INT (C macro)
single: T_LONG single: T_LONG (C macro)
single: T_LONGLONG single: T_LONGLONG (C macro)
single: T_UBYTE single: T_UBYTE (C macro)
single: T_USHORT single: T_USHORT (C macro)
single: T_UINT single: T_UINT (C macro)
single: T_ULONG single: T_ULONG (C macro)
single: T_ULONGULONG single: T_ULONGULONG (C macro)
single: T_PYSSIZET single: T_PYSSIZET (C macro)
single: T_FLOAT single: T_FLOAT (C macro)
single: T_DOUBLE single: T_DOUBLE (C macro)
single: T_BOOL single: T_BOOL (C macro)
single: T_CHAR single: T_CHAR (C macro)
single: T_STRING single: T_STRING (C macro)
single: T_STRING_INPLACE single: T_STRING_INPLACE (C macro)
single: T_OBJECT_EX single: T_OBJECT_EX (C macro)
single: structmember.h single: structmember.h
.. versionadded:: 3.12 .. versionadded:: 3.12
@ -659,7 +693,8 @@ Defining Getters and Setters
.. c:member:: setter set .. c:member:: setter set
Optional C function to set or delete the attribute, if omitted the attribute is readonly. Optional C function to set or delete the attribute.
If ``NULL``, the attribute is read-only.
.. c:member:: const char* doc .. c:member:: const char* doc
@ -667,20 +702,20 @@ Defining Getters and Setters
.. c:member:: void* closure .. c:member:: void* closure
Optional function pointer, providing additional data for getter and setter. Optional user data pointer, providing additional data for getter and setter.
.. c:type:: PyObject *(*getter)(PyObject *, void *)
The ``get`` function takes one :c:expr:`PyObject*` parameter (the The ``get`` function takes one :c:expr:`PyObject*` parameter (the
instance) and a function pointer (the associated ``closure``):: instance) and a user data pointer (the associated ``closure``):
typedef PyObject *(*getter)(PyObject *, void *);
It should return a new reference on success or ``NULL`` with a set exception It should return a new reference on success or ``NULL`` with a set exception
on failure. on failure.
``set`` functions take two :c:expr:`PyObject*` parameters (the instance and .. c:type:: int (*setter)(PyObject *, PyObject *, void *)
the value to be set) and a function pointer (the associated ``closure``)::
typedef int (*setter)(PyObject *, PyObject *, void *); ``set`` functions take two :c:expr:`PyObject*` parameters (the instance and
the value to be set) and a user data pointer (the associated ``closure``):
In case the attribute should be deleted the second parameter is ``NULL``. In case the attribute should be deleted the second parameter is ``NULL``.
Should return ``0`` on success or ``-1`` with a set exception on failure. Should return ``0`` on success or ``-1`` with a set exception on failure.

View file

@ -5,6 +5,7 @@
Operating System Utilities Operating System Utilities
========================== ==========================
.. c:function:: PyObject* PyOS_FSPath(PyObject *path) .. c:function:: PyObject* PyOS_FSPath(PyObject *path)
Return the file system representation for *path*. If the object is a Return the file system representation for *path*. If the object is a
@ -97,27 +98,30 @@ Operating System Utilities
.. c:function:: int PyOS_CheckStack() .. c:function:: int PyOS_CheckStack()
.. index:: single: USE_STACKCHECK (C macro)
Return true when the interpreter runs out of stack space. This is a reliable Return true when the interpreter runs out of stack space. This is a reliable
check, but is only available when :c:macro:`USE_STACKCHECK` is defined (currently check, but is only available when :c:macro:`!USE_STACKCHECK` is defined (currently
on certain versions of Windows using the Microsoft Visual C++ compiler). on certain versions of Windows using the Microsoft Visual C++ compiler).
:c:macro:`USE_STACKCHECK` will be defined automatically; you should never :c:macro:`!USE_STACKCHECK` will be defined automatically; you should never
change the definition in your own code. change the definition in your own code.
.. c:type:: void (*PyOS_sighandler_t)(int)
.. c:function:: PyOS_sighandler_t PyOS_getsig(int i) .. c:function:: PyOS_sighandler_t PyOS_getsig(int i)
Return the current signal handler for signal *i*. This is a thin wrapper around Return the current signal handler for signal *i*. This is a thin wrapper around
either :c:func:`!sigaction` or :c:func:`!signal`. Do not call those functions either :c:func:`!sigaction` or :c:func:`!signal`. Do not call those functions
directly! :c:type:`PyOS_sighandler_t` is a typedef alias for :c:expr:`void directly!
(\*)(int)`.
.. c:function:: PyOS_sighandler_t PyOS_setsig(int i, PyOS_sighandler_t h) .. c:function:: PyOS_sighandler_t PyOS_setsig(int i, PyOS_sighandler_t h)
Set the signal handler for signal *i* to be *h*; return the old signal handler. Set the signal handler for signal *i* to be *h*; return the old signal handler.
This is a thin wrapper around either :c:func:`!sigaction` or :c:func:`!signal`. Do This is a thin wrapper around either :c:func:`!sigaction` or :c:func:`!signal`. Do
not call those functions directly! :c:type:`PyOS_sighandler_t` is a typedef not call those functions directly!
alias for :c:expr:`void (\*)(int)`.
.. c:function:: wchar_t* Py_DecodeLocale(const char* arg, size_t *size) .. c:function:: wchar_t* Py_DecodeLocale(const char* arg, size_t *size)
@ -342,10 +346,8 @@ accessible to C code. They all work with the current interpreter thread's
silently abort the operation by raising an error subclassed from silently abort the operation by raising an error subclassed from
:class:`Exception` (other errors will not be silenced). :class:`Exception` (other errors will not be silenced).
The hook function is of type :c:expr:`int (*)(const char *event, PyObject The hook function is always called with the GIL held by the Python
*args, void *userData)`, where *args* is guaranteed to be a interpreter that raised the event.
:c:type:`PyTupleObject`. The hook function is always called with the GIL
held by the Python interpreter that raised the event.
See :pep:`578` for a detailed description of auditing. Functions in the See :pep:`578` for a detailed description of auditing. Functions in the
runtime and standard library that raise events are listed in the runtime and standard library that raise events are listed in the
@ -354,12 +356,21 @@ accessible to C code. They all work with the current interpreter thread's
.. audit-event:: sys.addaudithook "" c.PySys_AddAuditHook .. audit-event:: sys.addaudithook "" c.PySys_AddAuditHook
If the interpreter is initialized, this function raises a auditing event If the interpreter is initialized, this function raises an auditing event
``sys.addaudithook`` with no arguments. If any existing hooks raise an ``sys.addaudithook`` with no arguments. If any existing hooks raise an
exception derived from :class:`Exception`, the new hook will not be exception derived from :class:`Exception`, the new hook will not be
added and the exception is cleared. As a result, callers cannot assume added and the exception is cleared. As a result, callers cannot assume
that their hook has been added unless they control all existing hooks. that their hook has been added unless they control all existing hooks.
.. c:namespace:: NULL
.. c:type:: int (*Py_AuditHookFunction) (const char *event, PyObject *args, void *userData)
The type of the hook function.
*event* is the C string event argument passed to :c:func:`PySys_Audit` or
:c:func:`PySys_AuditTuple`.
*args* is guaranteed to be a :c:type:`PyTupleObject`.
*userData* is the argument passed to PySys_AddAuditHook().
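For illustration only, a hook of this type might look like the following
sketch; the hook name and the blocked event are invented, and ``<string.h>``
is assumed to be included::
static int
demo_audit_hook(const char *event, PyObject *args, void *userData)
{
    if (strcmp(event, "os.system") == 0) {
        PyErr_SetString(PyExc_RuntimeError, "os.system is disabled");
        return -1;   // abort the audited operation
    }
    return 0;        // allow everything else
}
// Registration, typically early during interpreter setup:
if (PySys_AddAuditHook(demo_audit_hook, NULL) < 0) {
    return -1;
}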
.. versionadded:: 3.8 .. versionadded:: 3.8
@ -371,7 +382,7 @@ Process Control
.. c:function:: void Py_FatalError(const char *message) .. c:function:: void Py_FatalError(const char *message)
.. index:: single: abort() .. index:: single: abort (C function)
Print a fatal error message and kill the process. No cleanup is performed. Print a fatal error message and kill the process. No cleanup is performed.
This function should only be invoked when a condition is detected that would This function should only be invoked when a condition is detected that would
@ -391,8 +402,8 @@ Process Control
.. c:function:: void Py_Exit(int status) .. c:function:: void Py_Exit(int status)
.. index:: .. index::
single: Py_FinalizeEx() single: Py_FinalizeEx (C function)
single: exit() single: exit (C function)
Exit the current process. This calls :c:func:`Py_FinalizeEx` and then calls the Exit the current process. This calls :c:func:`Py_FinalizeEx` and then calls the
standard C library function ``exit(status)``. If :c:func:`Py_FinalizeEx` standard C library function ``exit(status)``. If :c:func:`Py_FinalizeEx`
@ -405,7 +416,7 @@ Process Control
.. c:function:: int Py_AtExit(void (*func) ()) .. c:function:: int Py_AtExit(void (*func) ())
.. index:: .. index::
single: Py_FinalizeEx() single: Py_FinalizeEx (C function)
single: cleanup functions single: cleanup functions
Register a cleanup function to be called by :c:func:`Py_FinalizeEx`. The cleanup Register a cleanup function to be called by :c:func:`Py_FinalizeEx`. The cleanup

83
Doc/c-api/time.rst Normal file
View file

@ -0,0 +1,83 @@
.. highlight:: c
PyTime C API
============
.. versionadded:: 3.13
The clock C API provides access to system clocks.
It is similar to the Python :mod:`time` module.
For C API related to the :mod:`datetime` module, see :ref:`datetimeobjects`.
Types
-----
.. c:type:: PyTime_t
A timestamp or duration in nanoseconds, represented as a signed 64-bit
integer.
The reference point for timestamps depends on the clock used. For example,
:c:func:`PyTime_Time` returns timestamps relative to the UNIX epoch.
The supported range is around [-292.3 years; +292.3 years].
Using the Unix epoch (January 1st, 1970) as reference, the supported date
range is around [1677-09-21; 2262-04-11].
The exact limits are exposed as constants:
.. c:var:: PyTime_t PyTime_MIN
Minimum value of :c:type:`PyTime_t`.
.. c:var:: PyTime_t PyTime_MAX
Maximum value of :c:type:`PyTime_t`.
Clock Functions
---------------
The following functions take a pointer to a :c:expr:`PyTime_t` that they
set to the value of a particular clock.
Details of each clock are given in the documentation of the corresponding
Python function.
The functions return ``0`` on success, or ``-1`` (with an exception set)
on failure.
On integer overflow, they set the :c:data:`PyExc_OverflowError` exception and
set ``*result`` to the value clamped to the ``[PyTime_MIN; PyTime_MAX]``
range.
(On current systems, integer overflows are likely caused by misconfigured
system time.)
As with any other C API (unless otherwise specified), the functions must be called
with the :term:`GIL` held.
.. c:function:: int PyTime_Monotonic(PyTime_t *result)
Read the monotonic clock.
See :func:`time.monotonic` for important details on this clock.
.. c:function:: int PyTime_PerfCounter(PyTime_t *result)
Read the performance counter.
See :func:`time.perf_counter` for important details on this clock.
.. c:function:: int PyTime_Time(PyTime_t *result)
Read the “wall clock” time.
See :func:`time.time` for important details on this clock.
Conversion functions
--------------------
.. c:function:: double PyTime_AsSecondsDouble(PyTime_t t)
Convert a timestamp to a number of seconds as a C :c:expr:`double`.
The function cannot fail, but note that :c:expr:`double` has limited
accuracy for large values.
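As a short illustrative sketch (not part of the original documentation),
reading the monotonic clock and converting the result to seconds might look
like this, assuming the surrounding C function returns ``NULL`` on error::
PyTime_t t;
if (PyTime_Monotonic(&t) < 0) {
    return NULL;   // exception set (value clamped on overflow)
}
double seconds = PyTime_AsSecondsDouble(t);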

View file

@ -59,6 +59,12 @@ Tuple Objects
Return the object at position *pos* in the tuple pointed to by *p*. If *pos* is Return the object at position *pos* in the tuple pointed to by *p*. If *pos* is
negative or out of bounds, return ``NULL`` and set an :exc:`IndexError` exception. negative or out of bounds, return ``NULL`` and set an :exc:`IndexError` exception.
The returned reference is borrowed from the tuple *p*
(that is: it is only valid as long as you hold a reference to *p*).
To get a :term:`strong reference`, use
:c:func:`Py_NewRef(PyTuple_GetItem(...)) <Py_NewRef>`
or :c:func:`PySequence_GetItem`.
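For example (an illustrative sketch; ``tup`` is an invented name for a tuple
object)::
PyObject *item = PyTuple_GetItem(tup, 0);   // borrowed reference
if (item == NULL) {
    return NULL;                            // IndexError already set
}
item = Py_NewRef(item);                     // now a strong reference
// ... 'item' stays valid even if 'tup' is released ...
Py_DECREF(item);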
.. c:function:: PyObject* PyTuple_GET_ITEM(PyObject *p, Py_ssize_t pos) .. c:function:: PyObject* PyTuple_GET_ITEM(PyObject *p, Py_ssize_t pos)

View file

@ -185,6 +185,21 @@ Type Objects
.. versionadded:: 3.11 .. versionadded:: 3.11
.. c:function:: PyObject* PyType_GetFullyQualifiedName(PyTypeObject *type)
Return the type's fully qualified name. Equivalent to
``f"{type.__module__}.{type.__qualname__}"``, or ``type.__qualname__`` if
``type.__module__`` is not a string or is equal to ``"builtins"``.
.. versionadded:: 3.13
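An illustrative sketch (``obj`` is an invented name for any
:c:expr:`PyObject*`)::
PyObject *name = PyType_GetFullyQualifiedName(Py_TYPE(obj));
if (name == NULL) {
    return NULL;   // exception set
}
// 'name' is a new reference, e.g. the string "collections.OrderedDict".
Py_DECREF(name);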
.. c:function:: PyObject* PyType_GetModuleName(PyTypeObject *type)
Return the type's module name. Equivalent to getting the ``type.__module__``
attribute.
.. versionadded:: 3.13
.. c:function:: void* PyType_GetSlot(PyTypeObject *type, int slot) .. c:function:: void* PyType_GetSlot(PyTypeObject *type, int slot)
Return the function pointer stored in the given slot. If the Return the function pointer stored in the given slot. If the

View file

@ -883,6 +883,10 @@ and :c:data:`PyType_Type` effectively act as defaults.)
:c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash`, when the subtype's :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash`, when the subtype's
:c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both ``NULL``. :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both ``NULL``.
**Default:**
:c:data:`PyBaseObject_Type` uses :c:func:`PyObject_GenericHash`.
.. c:member:: ternaryfunc PyTypeObject.tp_call .. c:member:: ternaryfunc PyTypeObject.tp_call
@ -1030,7 +1034,8 @@ and :c:data:`PyType_Type` effectively act as defaults.)
the type, and the type object is INCREF'ed when a new instance is created, and the type, and the type object is INCREF'ed when a new instance is created, and
DECREF'ed when an instance is destroyed (this does not apply to instances of DECREF'ed when an instance is destroyed (this does not apply to instances of
subtypes; only the type referenced by the instance's ob_type gets INCREF'ed or subtypes; only the type referenced by the instance's ob_type gets INCREF'ed or
DECREF'ed). DECREF'ed). Heap types should also :ref:`support garbage collection <supporting-cycle-detection>`
as they can form a reference cycle with their own module object.
**Inheritance:** **Inheritance:**

View file

@ -518,6 +518,26 @@ APIs:
- :c:expr:`PyObject*` - :c:expr:`PyObject*`
- The result of calling :c:func:`PyObject_Repr`. - The result of calling :c:func:`PyObject_Repr`.
* - ``T``
- :c:expr:`PyObject*`
- Get the fully qualified name of an object type;
call :c:func:`PyType_GetFullyQualifiedName`.
* - ``#T``
- :c:expr:`PyObject*`
- Similar to ``T`` format, but use a colon (``:``) as separator between
the module name and the qualified name.
* - ``N``
- :c:expr:`PyTypeObject*`
- Get the fully qualified name of a type;
call :c:func:`PyType_GetFullyQualifiedName`.
* - ``#N``
- :c:expr:`PyTypeObject*`
- Similar to ``N`` format, but use a colon (``:``) as separator between
the module name and the qualified name.
.. note:: .. note::
The width formatter unit is number of characters rather than bytes. The width formatter unit is number of characters rather than bytes.
The precision formatter unit is number of bytes or :c:type:`wchar_t` The precision formatter unit is number of bytes or :c:type:`wchar_t`
@ -553,6 +573,9 @@ APIs:
In previous versions it caused all the rest of the format string to be In previous versions it caused all the rest of the format string to be
copied as-is to the result string, and any extra arguments discarded. copied as-is to the result string, and any extra arguments discarded.
.. versionchanged:: 3.13
Support for ``%T``, ``%#T``, ``%N`` and ``%#N`` formats added.
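A small illustrative sketch of the ``%T`` format added above (``obj`` is an
invented name for an arbitrary :c:expr:`PyObject*`; requires Python 3.13)::
// Produces e.g. "expected str, got collections.OrderedDict".
PyObject *msg = PyUnicode_FromFormat("expected str, got %T", obj);
if (msg == NULL) {
    return NULL;
}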
.. c:function:: PyObject* PyUnicode_FromFormatV(const char *format, va_list vargs) .. c:function:: PyObject* PyUnicode_FromFormatV(const char *format, va_list vargs)
@ -854,7 +877,12 @@ wchar_t Support
Copy the Unicode object contents into the :c:type:`wchar_t` buffer *wstr*. At most Copy the Unicode object contents into the :c:type:`wchar_t` buffer *wstr*. At most
*size* :c:type:`wchar_t` characters are copied (excluding a possibly trailing *size* :c:type:`wchar_t` characters are copied (excluding a possibly trailing
null termination character). Return the number of :c:type:`wchar_t` characters null termination character). Return the number of :c:type:`wchar_t` characters
copied or ``-1`` in case of an error. Note that the resulting :c:expr:`wchar_t*` copied or ``-1`` in case of an error.
When *wstr* is ``NULL``, instead return the *size* that would be required
to store all of *unicode* including a terminating null.
Note that the resulting :c:expr:`wchar_t*`
string may or may not be null-terminated. It is the responsibility of the caller string may or may not be null-terminated. It is the responsibility of the caller
to make sure that the :c:expr:`wchar_t*` string is null-terminated in case this is to make sure that the :c:expr:`wchar_t*` string is null-terminated in case this is
required by the application. Also, note that the :c:expr:`wchar_t*` string required by the application. Also, note that the :c:expr:`wchar_t*` string

View file

@ -20,4 +20,5 @@ and parsing function arguments and constructing Python values from C values.
hash.rst hash.rst
reflection.rst reflection.rst
codec.rst codec.rst
time.rst
perfmaps.rst perfmaps.rst

View file

@ -322,7 +322,7 @@ the same library that the Python runtime is using.
.. c:var:: int Py_eval_input .. c:var:: int Py_eval_input
.. index:: single: Py_CompileString() .. index:: single: Py_CompileString (C function)
The start symbol from the Python grammar for isolated expressions; for use with The start symbol from the Python grammar for isolated expressions; for use with
:c:func:`Py_CompileString`. :c:func:`Py_CompileString`.
@ -330,7 +330,7 @@ the same library that the Python runtime is using.
.. c:var:: int Py_file_input .. c:var:: int Py_file_input
.. index:: single: Py_CompileString() .. index:: single: Py_CompileString (C function)
The start symbol from the Python grammar for sequences of statements as read The start symbol from the Python grammar for sequences of statements as read
from a file or other source; for use with :c:func:`Py_CompileString`. This is from a file or other source; for use with :c:func:`Py_CompileString`. This is
@ -339,7 +339,7 @@ the same library that the Python runtime is using.
.. c:var:: int Py_single_input .. c:var:: int Py_single_input
.. index:: single: Py_CompileString() .. index:: single: Py_CompileString (C function)
The start symbol from the Python grammar for a single statement; for use with The start symbol from the Python grammar for a single statement; for use with
:c:func:`Py_CompileString`. This is the symbol used for the interactive :c:func:`Py_CompileString`. This is the symbol used for the interactive

View file

@ -6,10 +6,14 @@
# The contents of this file are pickled, so don't put values in the namespace # The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically). # that aren't pickleable (module imports are okay, they're removed automatically).
import sys, os, time import os
import sys
import time
sys.path.append(os.path.abspath('tools/extensions')) sys.path.append(os.path.abspath('tools/extensions'))
sys.path.append(os.path.abspath('includes')) sys.path.append(os.path.abspath('includes'))
from pyspecific import SOURCE_URI
# General configuration # General configuration
# --------------------- # ---------------------
@ -22,6 +26,7 @@
'pyspecific', 'pyspecific',
'sphinx.ext.coverage', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'sphinx.ext.doctest',
'sphinx.ext.extlinks',
] ]
# Skip if downstream redistributors haven't installed them # Skip if downstream redistributors haven't installed them
@ -55,13 +60,19 @@
# General substitutions. # General substitutions.
project = 'Python' project = 'Python'
copyright = '2001-%s, Python Software Foundation' % time.strftime('%Y') copyright = f"2001-{time.strftime('%Y')}, Python Software Foundation"
# We look for the Include/patchlevel.h file in the current Python source tree # We look for the Include/patchlevel.h file in the current Python source tree
# and replace the values accordingly. # and replace the values accordingly.
import patchlevel import patchlevel
version, release = patchlevel.get_version_info() version, release = patchlevel.get_version_info()
rst_epilog = f"""
.. |python_version_literal| replace:: ``Python {version}``
.. |python_x_dot_y_literal| replace:: ``python{version}``
.. |usr_local_bin_python_x_dot_y_literal| replace:: ``/usr/local/bin/python{version}``
"""
# There are two options for replacing |today|: either, you set today to some # There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used: # non-false value, then it is used:
today = '' today = ''
@ -74,6 +85,10 @@
# Minimum version of sphinx required # Minimum version of sphinx required
needs_sphinx = '4.2' needs_sphinx = '4.2'
# Create table of contents entries for domain objects (e.g. functions, classes,
# attributes, etc.). Default is True.
toc_object_entries = False
# Ignore any .rst files in the includes/ directory; # Ignore any .rst files in the includes/ directory;
# they're embedded in pages but not rendered individually. # they're embedded in pages but not rendered individually.
# Ignore any .rst files in the venv/ directory. # Ignore any .rst files in the venv/ directory.
@ -85,22 +100,34 @@
nitpick_ignore = [ nitpick_ignore = [
# Standard C functions # Standard C functions
('c:func', 'calloc'), ('c:func', 'calloc'),
('c:func', 'ctime'),
('c:func', 'dlopen'), ('c:func', 'dlopen'),
('c:func', 'exec'), ('c:func', 'exec'),
('c:func', 'fcntl'), ('c:func', 'fcntl'),
('c:func', 'flock'),
('c:func', 'fork'), ('c:func', 'fork'),
('c:func', 'free'), ('c:func', 'free'),
('c:func', 'gettimeofday'),
('c:func', 'gmtime'), ('c:func', 'gmtime'),
('c:func', 'grantpt'),
('c:func', 'ioctl'),
('c:func', 'localeconv'),
('c:func', 'localtime'), ('c:func', 'localtime'),
('c:func', 'main'), ('c:func', 'main'),
('c:func', 'malloc'), ('c:func', 'malloc'),
('c:func', 'mktime'),
('c:func', 'posix_openpt'),
('c:func', 'printf'), ('c:func', 'printf'),
('c:func', 'ptsname'),
('c:func', 'ptsname_r'),
('c:func', 'realloc'), ('c:func', 'realloc'),
('c:func', 'snprintf'), ('c:func', 'snprintf'),
('c:func', 'sprintf'), ('c:func', 'sprintf'),
('c:func', 'stat'), ('c:func', 'stat'),
('c:func', 'strftime'),
('c:func', 'system'), ('c:func', 'system'),
('c:func', 'time'), ('c:func', 'time'),
('c:func', 'unlockpt'),
('c:func', 'vsnprintf'), ('c:func', 'vsnprintf'),
# Standard C types # Standard C types
('c:type', 'FILE'), ('c:type', 'FILE'),
@ -119,11 +146,14 @@
('c:type', 'wchar_t'), ('c:type', 'wchar_t'),
('c:type', '__int64'), ('c:type', '__int64'),
('c:type', 'unsigned __int64'), ('c:type', 'unsigned __int64'),
('c:type', 'double'),
# Standard C structures # Standard C structures
('c:struct', 'in6_addr'), ('c:struct', 'in6_addr'),
('c:struct', 'in_addr'), ('c:struct', 'in_addr'),
('c:struct', 'stat'), ('c:struct', 'stat'),
('c:struct', 'statvfs'), ('c:struct', 'statvfs'),
('c:struct', 'timeval'),
('c:struct', 'timespec'),
# Standard C macros # Standard C macros
('c:macro', 'LLONG_MAX'), ('c:macro', 'LLONG_MAX'),
('c:macro', 'LLONG_MIN'), ('c:macro', 'LLONG_MIN'),
@ -250,15 +280,16 @@
('py:attr', '__annotations__'), ('py:attr', '__annotations__'),
('py:meth', '__missing__'), ('py:meth', '__missing__'),
('py:attr', '__wrapped__'), ('py:attr', '__wrapped__'),
('py:attr', 'decimal.Context.clamp'),
('py:meth', 'index'), # list.index, tuple.index, etc. ('py:meth', 'index'), # list.index, tuple.index, etc.
] ]
# gh-106948: Copy standard C types declared in the "c:type" domain to the # gh-106948: Copy standard C types declared in the "c:type" domain and C
# "c:identifier" domain, since "c:function" markup looks for types in the # structures declared in the "c:struct" domain to the "c:identifier" domain,
# "c:identifier" domain. Use list() to not iterate on items which are being # since "c:function" markup looks for types in the "c:identifier" domain. Use
# added # list() to not iterate on items which are being added
for role, name in list(nitpick_ignore): for role, name in list(nitpick_ignore):
if role == 'c:type': if role in ('c:type', 'c:struct'):
nitpick_ignore.append(('c:identifier', name)) nitpick_ignore.append(('c:identifier', name))
del role, name del role, name
@ -288,6 +319,9 @@
'root_include_title': False # We use the version switcher instead. 'root_include_title': False # We use the version switcher instead.
} }
if os.getenv("READTHEDOCS"):
html_theme_options["hosted_on"] = '<a href="https://about.readthedocs.com/">Read the Docs</a>'
# Override stylesheet fingerprinting for Windows CHM htmlhelp to fix GH-91207 # Override stylesheet fingerprinting for Windows CHM htmlhelp to fix GH-91207
# https://github.com/python/cpython/issues/91207 # https://github.com/python/cpython/issues/91207
if any('htmlhelp' in arg for arg in sys.argv): if any('htmlhelp' in arg for arg in sys.argv):
@ -296,7 +330,7 @@
print("It may be removed in the future\n") print("It may be removed in the future\n")
# Short title used e.g. for <title> HTML tags. # Short title used e.g. for <title> HTML tags.
html_short_title = '%s Documentation' % release html_short_title = f'{release} Documentation'
# Deployment preview information # Deployment preview information
# (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html) # (See .readthedocs.yml and https://docs.readthedocs.io/en/stable/reference/environment-variables.html)
@ -345,12 +379,9 @@
latex_engine = 'xelatex' latex_engine = 'xelatex'
# Get LaTeX to handle Unicode correctly
latex_elements = { latex_elements = {
} # For the LaTeX preamble.
'preamble': r'''
# Additional stuff for the LaTeX preamble.
latex_elements['preamble'] = r'''
\authoraddress{ \authoraddress{
\sphinxstrong{Python Software Foundation}\\ \sphinxstrong{Python Software Foundation}\\
Email: \sphinxemail{docs@python.org} Email: \sphinxemail{docs@python.org}
@ -358,13 +389,12 @@
\let\Verbatim=\OriginalVerbatim \let\Verbatim=\OriginalVerbatim
\let\endVerbatim=\endOriginalVerbatim \let\endVerbatim=\endOriginalVerbatim
\setcounter{tocdepth}{2} \setcounter{tocdepth}{2}
''' ''',
# The paper size ('letter' or 'a4').
# The paper size ('letter' or 'a4'). 'papersize': 'a4',
latex_elements['papersize'] = 'a4' # The font size ('10pt', '11pt' or '12pt').
'pointsize': '10pt',
# The font size ('10pt', '11pt' or '12pt'). }
latex_elements['pointsize'] = '10pt'
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]). # (source start file, target name, title, author, document class [howto/manual]).
@ -427,9 +457,9 @@
# Regexes to find C items in the source files. # Regexes to find C items in the source files.
coverage_c_regexes = { coverage_c_regexes = {
'cfunction': (r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'), 'cfunction': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)',
'data': (r'^PyAPI_DATA\(.*\)\s+([^_][\w_]+)'), 'data': r'^PyAPI_DATA\(.*\)\s+([^_][\w_]+)',
'macro': (r'^#define ([^_][\w_]+)\(.*\)[\s|\\]'), 'macro': r'^#define ([^_][\w_]+)\(.*\)[\s|\\]',
} }
# The coverage checker will ignore all C items whose names match these regexes # The coverage checker will ignore all C items whose names match these regexes
@ -486,6 +516,19 @@
r'https://unix.org/version2/whatsnew/lp64_wp.html', r'https://unix.org/version2/whatsnew/lp64_wp.html',
] ]
# Options for sphinx.ext.extlinks
# -------------------------------
# This config is a dictionary of external sites,
# mapping unique short aliases to a base URL and a prefix.
# https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html
extlinks = {
"cve": ("https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-%s", "CVE-%s"),
"cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"),
"pypi": ("https://pypi.org/project/%s/", "%s"),
"source": (SOURCE_URI, "%s"),
}
extlinks_detect_hardcoded_links = True
# Options for extensions # Options for extensions
# ---------------------- # ----------------------

View file

@ -4,7 +4,7 @@ Copyright
Python and this documentation is: Python and this documentation is:
Copyright © 2001-2023 Python Software Foundation. All rights reserved. Copyright © 2001-2024 Python Software Foundation. All rights reserved.
Copyright © 2000 BeOpen.com. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved.

View file

@ -402,6 +402,21 @@ PyContextVar_Reset:int:::
PyContextVar_Reset:PyObject*:var:0: PyContextVar_Reset:PyObject*:var:0:
PyContextVar_Reset:PyObject*:token:-1: PyContextVar_Reset:PyObject*:token:-1:
PyCFunction_New:PyObject*::+1:
PyCFunction_New:PyMethodDef*:ml::
PyCFunction_New:PyObject*:self:+1:
PyCFunction_NewEx:PyObject*::+1:
PyCFunction_NewEx:PyMethodDef*:ml::
PyCFunction_NewEx:PyObject*:self:+1:
PyCFunction_NewEx:PyObject*:module:+1:
PyCMethod_New:PyObject*::+1:
PyCMethod_New:PyMethodDef*:ml::
PyCMethod_New:PyObject*:self:+1:
PyCMethod_New:PyObject*:module:+1:
PyCMethod_New:PyObject*:cls:+1:
PyDate_Check:int::: PyDate_Check:int:::
PyDate_Check:PyObject*:ob:0: PyDate_Check:PyObject*:ob:0:
@ -1118,6 +1133,10 @@ PyList_GetItem:PyObject*::0:
PyList_GetItem:PyObject*:list:0: PyList_GetItem:PyObject*:list:0:
PyList_GetItem:Py_ssize_t:index:: PyList_GetItem:Py_ssize_t:index::
PyList_GetItemRef:PyObject*::+1:
PyList_GetItemRef:PyObject*:list:0:
PyList_GetItemRef:Py_ssize_t:index::
PyList_GetSlice:PyObject*::+1: PyList_GetSlice:PyObject*::+1:
PyList_GetSlice:PyObject*:list:0: PyList_GetSlice:PyObject*:list:0:
PyList_GetSlice:Py_ssize_t:low:: PyList_GetSlice:Py_ssize_t:low::

View file

@ -42,6 +42,8 @@ function,PyBytes_Repr,3.2,,
function,PyBytes_Size,3.2,, function,PyBytes_Size,3.2,,
var,PyBytes_Type,3.2,, var,PyBytes_Type,3.2,,
type,PyCFunction,3.2,, type,PyCFunction,3.2,,
type,PyCFunctionFast,3.13,,
type,PyCFunctionFastWithKeywords,3.13,,
type,PyCFunctionWithKeywords,3.2,, type,PyCFunctionWithKeywords,3.2,,
function,PyCFunction_GetFlags,3.2,, function,PyCFunction_GetFlags,3.2,,
function,PyCFunction_GetFunction,3.2,, function,PyCFunction_GetFunction,3.2,,
@ -190,6 +192,7 @@ function,PyEval_GetFuncDesc,3.2,,
function,PyEval_GetFuncName,3.2,, function,PyEval_GetFuncName,3.2,,
function,PyEval_GetGlobals,3.2,, function,PyEval_GetGlobals,3.2,,
function,PyEval_GetLocals,3.2,, function,PyEval_GetLocals,3.2,,
function,PyEval_InitThreads,3.2,,
function,PyEval_ReleaseThread,3.2,, function,PyEval_ReleaseThread,3.2,,
function,PyEval_RestoreThread,3.2,, function,PyEval_RestoreThread,3.2,,
function,PyEval_SaveThread,3.2,, function,PyEval_SaveThread,3.2,,
@ -220,6 +223,7 @@ var,PyExc_GeneratorExit,3.2,,
var,PyExc_IOError,3.2,, var,PyExc_IOError,3.2,,
var,PyExc_ImportError,3.2,, var,PyExc_ImportError,3.2,,
var,PyExc_ImportWarning,3.2,, var,PyExc_ImportWarning,3.2,,
var,PyExc_IncompleteInputError,3.13,,
var,PyExc_IndentationError,3.2,, var,PyExc_IndentationError,3.2,,
var,PyExc_IndexError,3.2,, var,PyExc_IndexError,3.2,,
var,PyExc_InterruptedError,3.7,, var,PyExc_InterruptedError,3.7,,
@ -335,6 +339,7 @@ var,PyListRevIter_Type,3.2,,
function,PyList_Append,3.2,, function,PyList_Append,3.2,,
function,PyList_AsTuple,3.2,, function,PyList_AsTuple,3.2,,
function,PyList_GetItem,3.2,, function,PyList_GetItem,3.2,,
function,PyList_GetItemRef,3.13,,
function,PyList_GetSlice,3.2,, function,PyList_GetSlice,3.2,,
function,PyList_Insert,3.2,, function,PyList_Insert,3.2,,
function,PyList_New,3.2,, function,PyList_New,3.2,,
@ -613,6 +618,8 @@ function,PySys_FormatStdout,3.2,,
function,PySys_GetObject,3.2,, function,PySys_GetObject,3.2,,
function,PySys_GetXOptions,3.7,, function,PySys_GetXOptions,3.7,,
function,PySys_ResetWarnOptions,3.2,, function,PySys_ResetWarnOptions,3.2,,
function,PySys_SetArgv,3.2,,
function,PySys_SetArgvEx,3.2,,
function,PySys_SetObject,3.2,, function,PySys_SetObject,3.2,,
function,PySys_WriteStderr,3.2,, function,PySys_WriteStderr,3.2,,
function,PySys_WriteStdout,3.2,, function,PySys_WriteStdout,3.2,,
@ -673,7 +680,10 @@ function,PyType_FromSpecWithBases,3.3,,
function,PyType_GenericAlloc,3.2,, function,PyType_GenericAlloc,3.2,,
function,PyType_GenericNew,3.2,, function,PyType_GenericNew,3.2,,
function,PyType_GetFlags,3.2,, function,PyType_GetFlags,3.2,,
function,PyType_GetFullyQualifiedName,3.13,,
function,PyType_GetModule,3.10,, function,PyType_GetModule,3.10,,
function,PyType_GetModuleByDef,3.13,,
function,PyType_GetModuleName,3.13,,
function,PyType_GetModuleState,3.10,, function,PyType_GetModuleState,3.10,,
function,PyType_GetName,3.11,, function,PyType_GetName,3.11,,
function,PyType_GetQualName,3.11,, function,PyType_GetQualName,3.11,,
@ -832,6 +842,8 @@ function,Py_GenericAlias,3.9,,
var,Py_GenericAliasType,3.9,, var,Py_GenericAliasType,3.9,,
function,Py_GetBuildInfo,3.2,, function,Py_GetBuildInfo,3.2,,
function,Py_GetCompiler,3.2,, function,Py_GetCompiler,3.2,,
function,Py_GetConstant,3.13,,
function,Py_GetConstantBorrowed,3.13,,
function,Py_GetCopyright,3.2,, function,Py_GetCopyright,3.2,,
function,Py_GetExecPrefix,3.2,, function,Py_GetExecPrefix,3.2,,
function,Py_GetPath,3.2,, function,Py_GetPath,3.2,,
@ -859,6 +871,8 @@ function,Py_NewInterpreter,3.2,,
function,Py_NewRef,3.10,, function,Py_NewRef,3.10,,
function,Py_ReprEnter,3.2,, function,Py_ReprEnter,3.2,,
function,Py_ReprLeave,3.2,, function,Py_ReprLeave,3.2,,
function,Py_SetProgramName,3.2,,
function,Py_SetPythonHome,3.2,,
function,Py_SetRecursionLimit,3.2,, function,Py_SetRecursionLimit,3.2,,
type,Py_UCS4,3.2,, type,Py_UCS4,3.2,,
macro,Py_UNBLOCK_THREADS,3.2,, macro,Py_UNBLOCK_THREADS,3.2,,

View file

@ -547,7 +547,7 @@ reference count of an object and are safe in the presence of ``NULL`` pointers
(but note that *temp* will not be ``NULL`` in this context). More info on them (but note that *temp* will not be ``NULL`` in this context). More info on them
in section :ref:`refcounts`. in section :ref:`refcounts`.
.. index:: single: PyObject_CallObject() .. index:: single: PyObject_CallObject (C function)
Later, when it is time to call the function, you call the C function Later, when it is time to call the function, you call the C function
:c:func:`PyObject_CallObject`. This function has two arguments, both pointers to :c:func:`PyObject_CallObject`. This function has two arguments, both pointers to
@ -638,7 +638,7 @@ the above example, we use :c:func:`Py_BuildValue` to construct the dictionary. :
Extracting Parameters in Extension Functions Extracting Parameters in Extension Functions
============================================ ============================================
.. index:: single: PyArg_ParseTuple() .. index:: single: PyArg_ParseTuple (C function)
The :c:func:`PyArg_ParseTuple` function is declared as follows:: The :c:func:`PyArg_ParseTuple` function is declared as follows::
@ -730,7 +730,7 @@ Some example calls::
Keyword Parameters for Extension Functions Keyword Parameters for Extension Functions
========================================== ==========================================
.. index:: single: PyArg_ParseTupleAndKeywords() .. index:: single: PyArg_ParseTupleAndKeywords (C function)
The :c:func:`PyArg_ParseTupleAndKeywords` function is declared as follows:: The :c:func:`PyArg_ParseTupleAndKeywords` function is declared as follows::

View file

@ -89,8 +89,8 @@ If your type supports garbage collection, the destructor should call
} }
.. index:: .. index::
single: PyErr_Fetch() single: PyErr_Fetch (C function)
single: PyErr_Restore() single: PyErr_Restore (C function)
One important requirement of the deallocator function is that it leaves any One important requirement of the deallocator function is that it leaves any
pending exceptions alone. This is important since deallocators are frequently pending exceptions alone. This is important since deallocators are frequently

View file

@ -259,9 +259,11 @@ is evaluated in all cases.
Why isn't there a switch or case statement in Python? Why isn't there a switch or case statement in Python?
----------------------------------------------------- -----------------------------------------------------
You can do this easily enough with a sequence of ``if... elif... elif... else``. In general, structured switch statements execute one block of code
For literal values, or constants within a namespace, you can also use a when an expression has a particular value or set of values.
``match ... case`` statement. Since Python 3.10 one can easily match literal values, or constants
within a namespace, with a ``match ... case`` statement.
An older alternative is a sequence of ``if... elif... elif... else``.
For cases where you need to choose from a very large number of possibilities, For cases where you need to choose from a very large number of possibilities,
you can create a dictionary mapping case values to functions to call. For you can create a dictionary mapping case values to functions to call. For
@ -290,6 +292,9 @@ It's suggested that you use a prefix for the method names, such as ``visit_`` in
this example. Without such a prefix, if values are coming from an untrusted this example. Without such a prefix, if values are coming from an untrusted
source, an attacker would be able to call any method on your object. source, an attacker would be able to call any method on your object.
Imitating switch with fallthrough, as with C's switch-case-default,
is possible, but it is much harder and less often needed.
Can't you emulate threads in the interpreter instead of relying on an OS-specific thread implementation? Can't you emulate threads in the interpreter instead of relying on an OS-specific thread implementation?
-------------------------------------------------------------------------------------------------------- --------------------------------------------------------------------------------------------------------
@ -451,7 +456,7 @@ on the key and a per-process seed; for example, ``'Python'`` could hash to
to ``1142331976``. The hash code is then used to calculate a location in an to ``1142331976``. The hash code is then used to calculate a location in an
internal array where the value will be stored. Assuming that you're storing internal array where the value will be stored. Assuming that you're storing
keys that all have different hash values, this means that dictionaries take keys that all have different hash values, this means that dictionaries take
constant time -- O(1), in Big-O notation -- to retrieve a key. constant time -- *O*\ (1), in Big-O notation -- to retrieve a key.
Why must dictionary keys be immutable? Why must dictionary keys be immutable?

View file

@ -50,7 +50,7 @@ to learn Python's C API.
If you need to interface to some C or C++ library for which no Python extension If you need to interface to some C or C++ library for which no Python extension
currently exists, you can try wrapping the library's data types and functions currently exists, you can try wrapping the library's data types and functions
with a tool such as `SWIG <https://www.swig.org>`_. `SIP with a tool such as `SWIG <https://www.swig.org>`_. `SIP
<https://riverbankcomputing.com/software/sip/intro>`__, `CXX <https://github.com/Python-SIP/sip>`__, `CXX
<https://cxx.sourceforge.net/>`_ `Boost <https://cxx.sourceforge.net/>`_ `Boost
<https://www.boost.org/libs/python/doc/index.html>`_, or `Weave <https://www.boost.org/libs/python/doc/index.html>`_, or `Weave
<https://github.com/scipy/weave>`_ are also <https://github.com/scipy/weave>`_ are also

View file

@ -133,8 +133,6 @@ Python versions are numbered "A.B.C" or "A.B":
changes. changes.
* *C* is the micro version number -- it is incremented for each bugfix release. * *C* is the micro version number -- it is incremented for each bugfix release.
See :pep:`6` for more information about bugfix releases.
Not all releases are bugfix releases. In the run-up to a new feature release, a Not all releases are bugfix releases. In the run-up to a new feature release, a
series of development releases are made, denoted as alpha, beta, or release series of development releases are made, denoted as alpha, beta, or release
candidate. Alphas are early releases in which interfaces aren't yet finalized; candidate. Alphas are early releases in which interfaces aren't yet finalized;
@ -157,7 +155,11 @@ unreleased versions, built directly from the CPython development repository. In
practice, after a final minor release is made, the version is incremented to the practice, after a final minor release is made, the version is incremented to the
next minor version, which becomes the "a0" version, e.g. "2.4a0". next minor version, which becomes the "a0" version, e.g. "2.4a0".
See also the documentation for :data:`sys.version`, :data:`sys.hexversion`, and See the `Developer's Guide
<https://devguide.python.org/developer-workflow/development-cycle/>`__
for more information about the development cycle, and
:pep:`387` to learn more about Python's backward compatibility policy. See also
the documentation for :data:`sys.version`, :data:`sys.hexversion`, and
:data:`sys.version_info`. :data:`sys.version_info`.
View file
@ -405,22 +405,37 @@ lists. When in doubt, use a mutex!
Can't we get rid of the Global Interpreter Lock? Can't we get rid of the Global Interpreter Lock?
------------------------------------------------ ------------------------------------------------
.. XXX link to dbeazley's talk about GIL?
The :term:`global interpreter lock` (GIL) is often seen as a hindrance to Python's The :term:`global interpreter lock` (GIL) is often seen as a hindrance to Python's
deployment on high-end multiprocessor server machines, because a multi-threaded deployment on high-end multiprocessor server machines, because a multi-threaded
Python program effectively only uses one CPU, due to the insistence that Python program effectively only uses one CPU, due to the insistence that
(almost) all Python code can only run while the GIL is held. (almost) all Python code can only run while the GIL is held.
Back in the days of Python 1.5, Greg Stein actually implemented a comprehensive With the approval of :pep:`703` work is now underway to remove the GIL from the
patch set (the "free threading" patches) that removed the GIL and replaced it CPython implementation of Python. Initially it will be implemented as an
with fine-grained locking. Adam Olsen recently did a similar experiment optional compiler flag when building the interpreter, and so separate
in his `python-safethread <https://code.google.com/archive/p/python-safethread>`_ builds will be available with and without the GIL. Long-term, the hope is
project. Unfortunately, both experiments exhibited a sharp drop in single-thread to settle on a single build, once the performance implications of removing the
performance (at least 30% slower), due to the amount of fine-grained locking GIL are fully understood. Python 3.13 is likely to be the first release
necessary to compensate for the removal of the GIL. containing this work, although it may not be completely functional in this
release.
This doesn't mean that you can't make good use of Python on multi-CPU machines! The current work to remove the GIL is based on a
`fork of Python 3.9 with the GIL removed <https://github.com/colesbury/nogil>`_
by Sam Gross.
Prior to that,
in the days of Python 1.5, Greg Stein actually implemented a comprehensive
patch set (the "free threading" patches) that removed the GIL and replaced it
with fine-grained locking. Adam Olsen did a similar experiment
in his `python-safethread <https://code.google.com/archive/p/python-safethread>`_
project. Unfortunately, both of these earlier experiments exhibited a sharp
drop in single-thread
performance (at least 30% slower), due to the amount of fine-grained locking
necessary to compensate for the removal of the GIL. The Python 3.9 fork
is the first attempt at removing the GIL with an acceptable performance
impact.
The presence of the GIL in current Python releases
doesn't mean that you can't make good use of Python on multi-CPU machines!
You just have to be creative with dividing the work up between multiple You just have to be creative with dividing the work up between multiple
*processes* rather than multiple *threads*. The *processes* rather than multiple *threads*. The
:class:`~concurrent.futures.ProcessPoolExecutor` class in the new :class:`~concurrent.futures.ProcessPoolExecutor` class in the new
@ -434,22 +449,13 @@ thread of execution is in the C code and allow other threads to get some work
done. Some standard library modules such as :mod:`zlib` and :mod:`hashlib` done. Some standard library modules such as :mod:`zlib` and :mod:`hashlib`
already do this. already do this.
It has been suggested that the GIL should be a per-interpreter-state lock rather An alternative approach to reducing the impact of the GIL is
than truly global; interpreters then wouldn't be able to share objects. to make the GIL a per-interpreter-state lock rather than truly global.
Unfortunately, this isn't likely to happen either. It would be a tremendous This was :ref:`first implemented in Python 3.12 <whatsnew312-pep684>` and is
amount of work, because many object implementations currently have global state. available in the C API. A Python interface to it is expected in Python 3.13.
For example, small integers and short strings are cached; these caches would The main limitation to it at the moment is likely to be 3rd party extension
have to be moved to the interpreter state. Other object types have their own modules, since these must be written with multiple interpreters in mind in
free list; these free lists would have to be moved to the interpreter state. order to be usable, so many older extension modules will not be usable.
And so on.
And I doubt that it can even be done in finite time, because the same problem
exists for 3rd party extensions. It is likely that 3rd party extensions are
being written at a faster rate than you can convert them to store all their
global state in the interpreter state.
And finally, once you have multiple interpreters not sharing any state, what
have you gained over running each interpreter in a separate process?
Input and Output Input and Output
@ -610,8 +616,7 @@ use ``p.read(n)``.
("ptys") instead of pipes. Or you can use a Python interface to Don Libes' ("ptys") instead of pipes. Or you can use a Python interface to Don Libes'
"expect" library. A Python extension that interfaces to expect is called "expect" library. A Python extension that interfaces to expect is called
"expy" and available from https://expectpy.sourceforge.net. A pure Python "expy" and available from https://expectpy.sourceforge.net. A pure Python
solution that works like expect is `pexpect solution that works like expect is :pypi:`pexpect`.
<https://pypi.org/project/pexpect/>`_.
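A minimal pexpect sketch, assuming :pypi:`pexpect` is installed and an ``ftp`` client is available; the host name and prompts are illustrative only::

   import pexpect

   child = pexpect.spawn('ftp ftp.example.org')   # hypothetical host
   child.expect('Name .*: ')
   child.sendline('anonymous')
   child.expect('Password:')
   child.sendline('guest@example.org')
   child.expect('ftp> ')
   child.sendline('quit')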
How do I access the serial (RS232) port? How do I access the serial (RS232) port?
@ -619,7 +624,7 @@ How do I access the serial (RS232) port?
For Win32, OSX, Linux, BSD, Jython, IronPython: For Win32, OSX, Linux, BSD, Jython, IronPython:
https://pypi.org/project/pyserial/ :pypi:`pyserial`
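A minimal pyserial sketch; the device path is an assumption and should be replaced with the port name used on your system::

   import serial    # provided by the pyserial package

   ser = serial.Serial('/dev/ttyUSB0', baudrate=9600, timeout=1)
   ser.write(b'AT\r\n')           # send a command
   print(ser.readline())          # one line of the reply, or b'' on timeout
   ser.close()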
For Unix, see a Usenet post by Mitch Chapman: For Unix, see a Usenet post by Mitch Chapman:
View file
@ -341,7 +341,7 @@ Glossary
docstring docstring
A string literal which appears as the first expression in a class, A string literal which appears as the first expression in a class,
function or module. While ignored when the suite is executed, it is function or module. While ignored when the suite is executed, it is
recognized by the compiler and put into the :attr:`__doc__` attribute recognized by the compiler and put into the :attr:`!__doc__` attribute
of the enclosing class, function or module. Since it is available via of the enclosing class, function or module. Since it is available via
introspection, it is the canonical place for documentation of the introspection, it is the canonical place for documentation of the
object. object.
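For example, the string literal below becomes the function's :attr:`!__doc__`::

   def greet(name):
       """Return a friendly greeting for *name*."""
       return f"Hello, {name}!"

   print(greet.__doc__)    # Return a friendly greeting for *name*.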
@ -547,12 +547,12 @@ Glossary
tasks such as compression or hashing. Also, the GIL is always released tasks such as compression or hashing. Also, the GIL is always released
when doing I/O. when doing I/O.
Past efforts to create a "free-threaded" interpreter (one which locks As of Python 3.13, the GIL can be disabled using the :option:`--disable-gil`
shared data at a much finer granularity) have not been successful build configuration. After building Python with this option, code must be
because performance suffered in the common single-processor case. It run with :option:`-X gil 0 <-X>` or after setting the :envvar:`PYTHON_GIL=0 <PYTHON_GIL>`
is believed that overcoming this performance issue would make the environment variable. This feature enables improved performance for
implementation much more complicated and therefore costlier to maintain. multi-threaded applications and makes it easier to use multi-core CPUs
efficiently. For more details, see :pep:`703`.
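A small runtime check, assuming a Python 3.13 free-threaded build where the private helper ``sys._is_gil_enabled()`` is available::

   import sys

   if hasattr(sys, "_is_gil_enabled"):
       # Free-threaded (--disable-gil) build: -X gil or PYTHON_GIL can
       # influence whether the GIL is actually active at runtime.
       print("GIL enabled:", sys._is_gil_enabled())
   else:
       print("This build of Python always runs with the GIL.")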
hash-based pyc hash-based pyc
A bytecode cache file that uses the hash rather than the last-modified A bytecode cache file that uses the hash rather than the last-modified
@ -727,22 +727,10 @@ Glossary
thread removes *key* from *mapping* after the test, but before the lookup. thread removes *key* from *mapping* after the test, but before the lookup.
This issue can be solved with locks or by using the EAFP approach. This issue can be solved with locks or by using the EAFP approach.
locale encoding
On Unix, it is the encoding of the LC_CTYPE locale. It can be set with
:func:`locale.setlocale(locale.LC_CTYPE, new_locale) <locale.setlocale>`.
On Windows, it is the ANSI code page (ex: ``"cp1252"``).
On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding.
``locale.getencoding()`` can be used to get the locale encoding.
See also the :term:`filesystem encoding and error handler`.
list list
A built-in Python :term:`sequence`. Despite its name it is more akin A built-in Python :term:`sequence`. Despite its name it is more akin
to an array in other languages than to a linked list since access to to an array in other languages than to a linked list since access to
elements is O(1). elements is *O*\ (1).
list comprehension list comprehension
A compact way to process all or part of the elements in a sequence and A compact way to process all or part of the elements in a sequence and
@ -758,6 +746,18 @@ Glossary
:term:`finder`. See :pep:`302` for details and :term:`finder`. See :pep:`302` for details and
:class:`importlib.abc.Loader` for an :term:`abstract base class`. :class:`importlib.abc.Loader` for an :term:`abstract base class`.
locale encoding
On Unix, it is the encoding of the LC_CTYPE locale. It can be set with
:func:`locale.setlocale(locale.LC_CTYPE, new_locale) <locale.setlocale>`.
On Windows, it is the ANSI code page (ex: ``"cp1252"``).
On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding.
:func:`locale.getencoding` can be used to get the locale encoding.
See also the :term:`filesystem encoding and error handler`.
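For example (the exact result depends on the platform and the configured locale)::

   import locale

   print(locale.getencoding())                # e.g. 'UTF-8' on most Unix systems
   locale.setlocale(locale.LC_CTYPE, '')      # adopt the user's configured locale
   print(locale.getpreferredencoding(False))  # a related helper; often the same value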
magic method magic method
.. index:: pair: magic; method .. index:: pair: magic; method
@ -800,8 +800,7 @@ Glossary
method resolution order method resolution order
Method Resolution Order is the order in which base classes are searched Method Resolution Order is the order in which base classes are searched
for a member during lookup. See `The Python 2.3 Method Resolution Order for a member during lookup. See :ref:`python_2.3_mro` for details of the
<https://www.python.org/download/releases/2.3/mro/>`_ for details of the
algorithm used by the Python interpreter since the 2.3 release. algorithm used by the Python interpreter since the 2.3 release.
module module
@ -841,10 +840,11 @@ Glossary
Some named tuples are built-in types (such as the above examples). Some named tuples are built-in types (such as the above examples).
Alternatively, a named tuple can be created from a regular class Alternatively, a named tuple can be created from a regular class
definition that inherits from :class:`tuple` and that defines named definition that inherits from :class:`tuple` and that defines named
fields. Such a class can be written by hand or it can be created with fields. Such a class can be written by hand, or it can be created by
the factory function :func:`collections.namedtuple`. The latter inheriting :class:`typing.NamedTuple`, or with the factory function
technique also adds some extra methods that may not be found in :func:`collections.namedtuple`. The latter techniques also add some
hand-written or built-in named tuples. extra methods that may not be found in hand-written or built-in named
tuples.
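For example, two of the approaches mentioned above::

   from collections import namedtuple
   from typing import NamedTuple

   # Factory function:
   Point = namedtuple('Point', ['x', 'y'])

   # typing.NamedTuple subclass:
   class Pixel(NamedTuple):
       x: int
       y: int

   p = Point(1, 2)
   print(p.x, p._replace(y=5))    # 1 Point(x=1, y=5); _replace() is an extra method
   print(Pixel(3, 4).y)           # 4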
namespace namespace
The place where a variable is stored. Namespaces are implemented as The place where a variable is stored. Namespaces are implemented as
@ -1104,10 +1104,12 @@ Glossary
The :class:`collections.abc.Sequence` abstract base class The :class:`collections.abc.Sequence` abstract base class
defines a much richer interface that goes beyond just defines a much richer interface that goes beyond just
:meth:`~object.__getitem__` and :meth:`~object.__len__`, adding :meth:`~object.__getitem__` and :meth:`~object.__len__`, adding
:meth:`count`, :meth:`index`, :meth:`~object.__contains__`, and :meth:`!count`, :meth:`!index`, :meth:`~object.__contains__`, and
:meth:`~object.__reversed__`. Types that implement this expanded :meth:`~object.__reversed__`. Types that implement this expanded
interface can be registered explicitly using interface can be registered explicitly using
:func:`~abc.ABCMeta.register`. :func:`~abc.ABCMeta.register`. For more documentation on sequence
methods generally, see
:ref:`Common Sequence Operations <typesseq-common>`.
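A minimal sketch of explicit registration; the ``Ring`` class is illustrative::

   from collections.abc import Sequence

   class Ring:
       """Wraps a list; indexing wraps around instead of raising IndexError."""
       def __init__(self, items):
           self._items = list(items)
       def __len__(self):
           return len(self._items)
       def __getitem__(self, index):
           return self._items[index % len(self._items)]

   Sequence.register(Ring)                  # explicit registration
   print(issubclass(Ring, Sequence))        # True
   print(Ring('abc')[5])                    # 'c'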
set comprehension set comprehension
A compact way to process all or part of the elements in an iterable and A compact way to process all or part of the elements in an iterable and
View file
@ -43,7 +43,7 @@ appearance---and the curses library will figure out what control codes
need to be sent to the terminal to produce the right output. curses need to be sent to the terminal to produce the right output. curses
doesn't provide many user-interface concepts such as buttons, checkboxes, doesn't provide many user-interface concepts such as buttons, checkboxes,
or dialogs; if you need such features, consider a user interface library such as or dialogs; if you need such features, consider a user interface library such as
`Urwid <https://pypi.org/project/urwid/>`_. :pypi:`Urwid`.
The curses library was originally written for BSD Unix; the later System V The curses library was originally written for BSD Unix; the later System V
versions of Unix from AT&T added many enhancements and new functions. BSD curses versions of Unix from AT&T added many enhancements and new functions. BSD curses
@ -56,8 +56,7 @@ versions of curses carried by some proprietary Unixes may not support
everything, though. everything, though.
The Windows version of Python doesn't include the :mod:`curses` The Windows version of Python doesn't include the :mod:`curses`
module. A ported version called `UniCurses module. A ported version called :pypi:`UniCurses` is available.
<https://pypi.org/project/UniCurses>`_ is available.
The Python curses module The Python curses module
@ -429,8 +428,7 @@ User Input
The C curses library offers only very simple input mechanisms. Python's The C curses library offers only very simple input mechanisms. Python's
:mod:`curses` module adds a basic text-input widget. (Other libraries :mod:`curses` module adds a basic text-input widget. (Other libraries
such as `Urwid <https://pypi.org/project/urwid/>`_ have more extensive such as :pypi:`Urwid` have more extensive collections of widgets.)
collections of widgets.)
There are two methods for getting input from a window: There are two methods for getting input from a window:
View file
@ -1,8 +1,8 @@
.. _descriptorhowto: .. _descriptorhowto:
====================== ================
Descriptor HowTo Guide Descriptor Guide
====================== ================
:Author: Raymond Hettinger :Author: Raymond Hettinger
:Contact: <python at rcn dot com> :Contact: <python at rcn dot com>
@ -1004,31 +1004,42 @@ here is a pure Python equivalent:
if doc is None and fget is not None: if doc is None and fget is not None:
doc = fget.__doc__ doc = fget.__doc__
self.__doc__ = doc self.__doc__ = doc
self._name = '' self._name = None
def __set_name__(self, owner, name): def __set_name__(self, owner, name):
self._name = name self._name = name
@property
def __name__(self):
return self._name if self._name is not None else self.fget.__name__
@__name__.setter
def __name__(self, value):
self._name = value
def __get__(self, obj, objtype=None): def __get__(self, obj, objtype=None):
if obj is None: if obj is None:
return self return self
if self.fget is None: if self.fget is None:
raise AttributeError( raise AttributeError(
f'property {self._name!r} of {type(obj).__name__!r} object has no getter' f'property {self.__name__!r} of {type(obj).__name__!r} '
'object has no getter'
) )
return self.fget(obj) return self.fget(obj)
def __set__(self, obj, value): def __set__(self, obj, value):
if self.fset is None: if self.fset is None:
raise AttributeError( raise AttributeError(
f'property {self._name!r} of {type(obj).__name__!r} object has no setter' f'property {self.__name__!r} of {type(obj).__name__!r} '
'object has no setter'
) )
self.fset(obj, value) self.fset(obj, value)
def __delete__(self, obj): def __delete__(self, obj):
if self.fdel is None: if self.fdel is None:
raise AttributeError( raise AttributeError(
f'property {self._name!r} of {type(obj).__name__!r} object has no deleter' f'property {self.__name__!r} of {type(obj).__name__!r} '
'object has no deleter'
) )
self.fdel(obj) self.fdel(obj)
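A short usage sketch for the updated equivalent, assuming the descriptor class is named ``Property`` as in the full example; the ``Cell`` class is illustrative::

   class Cell:
       @Property
       def value(self):
           "A read-only value."
           return 42

   c = Cell()
   print(c.value)                 # 42
   print(Cell.value.__name__)     # 'value', recorded by __set_name__
   try:
       c.value = 1
   except AttributeError as exc:
       print(exc)                 # property 'value' of 'Cell' object has no setter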
@ -1192,6 +1203,10 @@ roughly equivalent to:
"Emulate method_getattro() in Objects/classobject.c" "Emulate method_getattro() in Objects/classobject.c"
return getattr(self.__func__, name) return getattr(self.__func__, name)
def __get__(self, obj, objtype=None):
"Emulate method_descr_get() in Objects/classobject.c"
return self
To support automatic creation of methods, functions include the To support automatic creation of methods, functions include the
:meth:`__get__` method for binding methods during attribute access. This :meth:`__get__` method for binding methods during attribute access. This
means that functions are non-data descriptors that return bound methods means that functions are non-data descriptors that return bound methods
@ -1214,8 +1229,20 @@ descriptor works in practice:
.. testcode:: .. testcode::
class D: class D:
def f(self, x): def f(self):
return x return self
class D2:
pass
.. doctest::
:hide:
>>> d = D()
>>> d2 = D2()
>>> d2.f = d.f.__get__(d2, D2)
>>> d2.f() is d
True
The function has a :term:`qualified name` attribute to support introspection: The function has a :term:`qualified name` attribute to support introspection:
@ -1250,7 +1277,7 @@ instance::
<function D.f at 0x00C45070> <function D.f at 0x00C45070>
>>> d.f.__self__ >>> d.f.__self__
<__main__.D object at 0x1012e1f98> <__main__.D object at 0x00B18C90>
If you have ever wondered where *self* comes from in regular methods or where If you have ever wondered where *self* comes from in regular methods or where
*cls* comes from in class methods, this is it! *cls* comes from in class methods, this is it!
View file
@ -497,13 +497,30 @@ the :meth:`~Enum.__repr__` omits the inherited class' name. For example::
>>> Creature.DOG >>> Creature.DOG
<Creature.DOG: size='medium', legs=4> <Creature.DOG: size='medium', legs=4>
Use the :func:`!dataclass` argument ``repr=False`` Use the :func:`~dataclasses.dataclass` argument ``repr=False``
to use the standard :func:`repr`. to use the standard :func:`repr`.
.. versionchanged:: 3.12 .. versionchanged:: 3.12
Only the dataclass fields are shown in the value area, not the dataclass' Only the dataclass fields are shown in the value area, not the dataclass'
name. name.
.. note::
Adding :func:`~dataclasses.dataclass` decorator to :class:`Enum`
and its subclasses is not supported. It will not raise any errors,
but it will produce very strange results at runtime, such as members
being equal to each other::
>>> @dataclass # don't do this: it does not make any sense
... class Color(Enum):
... RED = 1
... BLUE = 2
...
>>> Color.RED is Color.BLUE
False
>>> Color.RED == Color.BLUE # problem is here: they should not be equal
True
Pickling Pickling
-------- --------
Doc/howto/gdb_helpers.rst Normal file
View file
@ -0,0 +1,449 @@
.. _gdb:
=========================================================
Debugging C API extensions and CPython Internals with GDB
=========================================================
.. highlight:: none
This document explains how the Python GDB extension, ``python-gdb.py``, can
be used with the GDB debugger to debug CPython extensions and the
CPython interpreter itself.
When debugging low-level problems such as crashes or deadlocks, a low-level
debugger, such as GDB, is useful to diagnose and correct the issue.
By default, GDB (or any of its front-ends) doesn't support high-level
information specific to the CPython interpreter.
The ``python-gdb.py`` extension adds CPython interpreter information to GDB.
The extension helps introspect the stack of currently executing Python functions.
Given a Python object represented by a :c:expr:`PyObject *` pointer,
the extension surfaces the type and value of the object.
Developers who are working on CPython extensions or tinkering with parts
of CPython that are written in C can use this document to learn how to use the
``python-gdb.py`` extension with GDB.
.. note::
This document assumes that you are familiar with the basics of GDB and the
CPython C API. It consolidates guidance from the
`devguide <https://devguide.python.org>`_ and the
`Python wiki <https://wiki.python.org/moin/DebuggingWithGdb>`_.
Prerequisites
=============
You need to have:
- GDB 7 or later. (For earlier versions of GDB, see ``Misc/gdbinit`` in the
sources of Python 3.11 or earlier.)
- GDB-compatible debugging information for Python and any extension you are
debugging.
- The ``python-gdb.py`` extension.
The extension is built with Python, but might be distributed separately or
not at all. Below, we include tips for a few common systems as examples.
Note that even if the instructions match your system, they might be outdated.
Setup with Python built from source
-----------------------------------
When you build CPython from source, debugging information should be available,
and the build should add a ``python-gdb.py`` file to the root directory of
your repository.
To activate support, you must add the directory containing ``python-gdb.py``
to GDB's "auto-load-safe-path".
If you haven't done this, recent versions of GDB will print out a warning
with instructions on how to do this.
.. note::
If you do not see instructions for your version of GDB, put this in your
configuration file (``~/.gdbinit`` or ``~/.config/gdb/gdbinit``)::
add-auto-load-safe-path /path/to/cpython
You can also add multiple paths, separated by ``:``.
Setup for Python from a Linux distro
------------------------------------
Most Linux systems provide debug information for the system Python
in a package called ``python-debuginfo``, ``python-dbg`` or similar.
For example:
- Fedora:
.. code-block:: shell
sudo dnf install gdb
sudo dnf debuginfo-install python3
- Ubuntu:
.. code-block:: shell
sudo apt install gdb python3-dbg
On several recent Linux systems, GDB can download debugging symbols
automatically using *debuginfod*.
However, this will not install the ``python-gdb.py`` extension;
you generally do need to install the debug info package separately.
Using the Debug build and Development mode
==========================================
For easier debugging, you might want to:
- Use a :ref:`debug build <debug-build>` of Python. (When building from source,
use ``configure --with-pydebug``. On Linux distros, install and run a package
like ``python-debug`` or ``python-dbg``, if available.)
- Use the runtime :ref:`development mode <devmode>` (``-X dev``).
Both enable extra assertions and disable some optimizations.
Sometimes this hides the bug you are trying to find, but in most cases they
make the process easier.
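A quick way to check which of these you are running under; ``sys.gettotalrefcount`` only exists in ``--with-pydebug`` builds::

   import sys

   print("debug build:", hasattr(sys, "gettotalrefcount"))
   print("dev mode:", sys.flags.dev_mode)    # True when run with -X dev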
Using the ``python-gdb`` extension
==================================
When the extension is loaded, it provides two main features:
pretty printers for Python values, and additional commands.
Pretty-printers
---------------
This is what a GDB backtrace looks like (truncated) when this extension is
enabled::
#0 0x000000000041a6b1 in PyObject_Malloc (nbytes=Cannot access memory at address 0x7fffff7fefe8
) at Objects/obmalloc.c:748
#1 0x000000000041b7c0 in _PyObject_DebugMallocApi (id=111 'o', nbytes=24) at Objects/obmalloc.c:1445
#2 0x000000000041b717 in _PyObject_DebugMalloc (nbytes=24) at Objects/obmalloc.c:1412
#3 0x000000000044060a in _PyUnicode_New (length=11) at Objects/unicodeobject.c:346
#4 0x00000000004466aa in PyUnicodeUCS2_DecodeUTF8Stateful (s=0x5c2b8d "__lltrace__", size=11, errors=0x0, consumed=
0x0) at Objects/unicodeobject.c:2531
#5 0x0000000000446647 in PyUnicodeUCS2_DecodeUTF8 (s=0x5c2b8d "__lltrace__", size=11, errors=0x0)
at Objects/unicodeobject.c:2495
#6 0x0000000000440d1b in PyUnicodeUCS2_FromStringAndSize (u=0x5c2b8d "__lltrace__", size=11)
at Objects/unicodeobject.c:551
#7 0x0000000000440d94 in PyUnicodeUCS2_FromString (u=0x5c2b8d "__lltrace__") at Objects/unicodeobject.c:569
#8 0x0000000000584abd in PyDict_GetItemString (v=
{'Yuck': <type at remote 0xad4730>, '__builtins__': <module at remote 0x7ffff7fd5ee8>, '__file__': 'Lib/test/crashers/nasty_eq_vs_dict.py', '__package__': None, 'y': <Yuck(i=0) at remote 0xaacd80>, 'dict': {0: 0, 1: 1, 2: 2, 3: 3}, '__cached__': None, '__name__': '__main__', 'z': <Yuck(i=0) at remote 0xaace60>, '__doc__': None}, key=
0x5c2b8d "__lltrace__") at Objects/dictobject.c:2171
Notice how the dictionary argument to ``PyDict_GetItemString`` is displayed
as its ``repr()``, rather than an opaque ``PyObject *`` pointer.
The extension works by supplying a custom printing routine for values of type
``PyObject *``. If you need to access lower-level details of an object, then
cast the value to a pointer of the appropriate type. For example::
(gdb) p globals
$1 = {'__builtins__': <module at remote 0x7ffff7fb1868>, '__name__':
'__main__', 'ctypes': <module at remote 0x7ffff7f14360>, '__doc__': None,
'__package__': None}
(gdb) p *(PyDictObject*)globals
$2 = {ob_refcnt = 3, ob_type = 0x3dbdf85820, ma_fill = 5, ma_used = 5,
ma_mask = 7, ma_table = 0x63d0f8, ma_lookup = 0x3dbdc7ea70
<lookdict_string>, ma_smalltable = {{me_hash = 7065186196740147912,
me_key = '__builtins__', me_value = <module at remote 0x7ffff7fb1868>},
{me_hash = -368181376027291943, me_key = '__name__',
me_value ='__main__'}, {me_hash = 0, me_key = 0x0, me_value = 0x0},
{me_hash = 0, me_key = 0x0, me_value = 0x0},
{me_hash = -9177857982131165996, me_key = 'ctypes',
me_value = <module at remote 0x7ffff7f14360>},
{me_hash = -8518757509529533123, me_key = '__doc__', me_value = None},
{me_hash = 0, me_key = 0x0, me_value = 0x0}, {
me_hash = 6614918939584953775, me_key = '__package__', me_value = None}}}
Note that the pretty-printers do not actually call ``repr()``.
For basic types, they try to match its result closely.
An area that can be confusing is that the custom printer for some types looks a
lot like GDB's built-in printer for standard types. For example, the
pretty-printer for a Python ``int`` (:c:expr:`PyLongObject *`)
gives a representation that is not distinguishable from that of a
regular machine-level integer::
(gdb) p some_machine_integer
$3 = 42
(gdb) p some_python_integer
$4 = 42
The internal structure can be revealed with a cast to :c:expr:`PyLongObject *`::
(gdb) p *(PyLongObject*)some_python_integer
$5 = {ob_base = {ob_base = {ob_refcnt = 8, ob_type = 0x3dad39f5e0}, ob_size = 1},
ob_digit = {42}}
A similar confusion can arise with the ``str`` type, where the output looks a
lot like gdb's built-in printer for ``char *``::
(gdb) p ptr_to_python_str
$6 = '__builtins__'
The pretty-printer for ``str`` instances defaults to using single-quotes (as
does Python's ``repr`` for strings) whereas the standard printer for ``char *``
values uses double-quotes and contains a hexadecimal address::
(gdb) p ptr_to_char_star
$7 = 0x6d72c0 "hello world"
Again, the implementation details can be revealed with a cast to
:c:expr:`PyUnicodeObject *`::
(gdb) p *(PyUnicodeObject*)$6
$8 = {ob_base = {ob_refcnt = 33, ob_type = 0x3dad3a95a0}, length = 12,
str = 0x7ffff2128500, hash = 7065186196740147912, state = 1, defenc = 0x0}
``py-list``
-----------
The extension adds a ``py-list`` command, which
lists the Python source code (if any) for the current frame in the selected
thread. The current line is marked with a ">"::
(gdb) py-list
901 if options.profile:
902 options.profile = False
903 profile_me()
904 return
905
>906 u = UI()
907 if not u.quit:
908 try:
909 gtk.main()
910 except KeyboardInterrupt:
911 # properly quit on a keyboard interrupt...
Use ``py-list START`` to list at a different line number within the Python
source, and ``py-list START,END`` to list a specific range of lines within
the Python source.
``py-up`` and ``py-down``
-------------------------
The ``py-up`` and ``py-down`` commands are analogous to GDB's regular ``up``
and ``down`` commands, but try to move at the level of CPython frames, rather
than C frames.
GDB is not always able to read the relevant frame information, depending on
the optimization level with which CPython was compiled. Internally, the
commands look for C frames that are executing the default frame evaluation
function (that is, the core bytecode interpreter loop within CPython) and
look up the value of the related ``PyFrameObject *``.
They emit the frame number (at the C level) within the thread.
For example::
(gdb) py-up
#37 Frame 0x9420b04, for file /usr/lib/python2.6/site-packages/
gnome_sudoku/main.py, line 906, in start_game ()
u = UI()
(gdb) py-up
#40 Frame 0x948e82c, for file /usr/lib/python2.6/site-packages/
gnome_sudoku/gnome_sudoku.py, line 22, in start_game(main=<module at remote 0xb771b7f4>)
main.start_game()
(gdb) py-up
Unable to find an older python frame
so we're at the top of the Python stack.
The frame numbers correspond to those displayed by GDB's standard
``backtrace`` command.
The command skips C frames which are not executing Python code.
Going back down::
(gdb) py-down
#37 Frame 0x9420b04, for file /usr/lib/python2.6/site-packages/gnome_sudoku/main.py, line 906, in start_game ()
u = UI()
(gdb) py-down
#34 (unable to read python frame information)
(gdb) py-down
#23 (unable to read python frame information)
(gdb) py-down
#19 (unable to read python frame information)
(gdb) py-down
#14 Frame 0x99262ac, for file /usr/lib/python2.6/site-packages/gnome_sudoku/game_selector.py, line 201, in run_swallowed_dialog (self=<NewOrSavedGameSelector(new_game_model=<gtk.ListStore at remote 0x98fab44>, puzzle=None, saved_games=[{'gsd.auto_fills': 0, 'tracking': {}, 'trackers': {}, 'notes': [], 'saved_at': 1270084485, 'game': '7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 0 0 0 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5\n7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 1 8 3 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5', 'gsd.impossible_hints': 0, 'timer.__absolute_start_time__': <float at remote 0x984b474>, 'gsd.hints': 0, 'timer.active_time': <float at remote 0x984b494>, 'timer.total_time': <float at remote 0x984b464>}], dialog=<gtk.Dialog at remote 0x98faaa4>, saved_game_model=<gtk.ListStore at remote 0x98fad24>, sudoku_maker=<SudokuMaker(terminated=False, played=[], batch_siz...(truncated)
swallower.run_dialog(self.dialog)
(gdb) py-down
#11 Frame 0x9aead74, for file /usr/lib/python2.6/site-packages/gnome_sudoku/dialog_swallower.py, line 48, in run_dialog (self=<SwappableArea(running=<gtk.Dialog at remote 0x98faaa4>, main_page=0) at remote 0x98fa6e4>, d=<gtk.Dialog at remote 0x98faaa4>)
gtk.main()
(gdb) py-down
#8 (unable to read python frame information)
(gdb) py-down
Unable to find a newer python frame
and we're at the bottom of the Python stack.
Note that in Python 3.12 and newer, the same C stack frame can be used for
multiple Python stack frames. This means that ``py-up`` and ``py-down``
may move multiple Python frames at once. For example::
(gdb) py-up
#6 Frame 0x7ffff7fb62b0, for file /tmp/rec.py, line 5, in recursive_function (n=0)
time.sleep(5)
#6 Frame 0x7ffff7fb6240, for file /tmp/rec.py, line 7, in recursive_function (n=1)
recursive_function(n-1)
#6 Frame 0x7ffff7fb61d0, for file /tmp/rec.py, line 7, in recursive_function (n=2)
recursive_function(n-1)
#6 Frame 0x7ffff7fb6160, for file /tmp/rec.py, line 7, in recursive_function (n=3)
recursive_function(n-1)
#6 Frame 0x7ffff7fb60f0, for file /tmp/rec.py, line 7, in recursive_function (n=4)
recursive_function(n-1)
#6 Frame 0x7ffff7fb6080, for file /tmp/rec.py, line 7, in recursive_function (n=5)
recursive_function(n-1)
#6 Frame 0x7ffff7fb6020, for file /tmp/rec.py, line 9, in <module> ()
recursive_function(5)
(gdb) py-up
Unable to find an older python frame
``py-bt``
---------
The ``py-bt`` command attempts to display a Python-level backtrace of the
current thread.
For example::
(gdb) py-bt
#8 (unable to read python frame information)
#11 Frame 0x9aead74, for file /usr/lib/python2.6/site-packages/gnome_sudoku/dialog_swallower.py, line 48, in run_dialog (self=<SwappableArea(running=<gtk.Dialog at remote 0x98faaa4>, main_page=0) at remote 0x98fa6e4>, d=<gtk.Dialog at remote 0x98faaa4>)
gtk.main()
#14 Frame 0x99262ac, for file /usr/lib/python2.6/site-packages/gnome_sudoku/game_selector.py, line 201, in run_swallowed_dialog (self=<NewOrSavedGameSelector(new_game_model=<gtk.ListStore at remote 0x98fab44>, puzzle=None, saved_games=[{'gsd.auto_fills': 0, 'tracking': {}, 'trackers': {}, 'notes': [], 'saved_at': 1270084485, 'game': '7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 0 0 0 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5\n7 8 0 0 0 0 0 5 6 0 0 9 0 8 0 1 0 0 0 4 6 0 0 0 0 7 0 6 5 1 8 3 4 7 9 2 0 0 0 9 0 1 0 0 0 3 9 7 6 0 0 0 1 8 0 6 0 0 0 0 2 8 0 0 0 5 0 4 0 6 0 0 2 1 0 0 0 0 0 4 5', 'gsd.impossible_hints': 0, 'timer.__absolute_start_time__': <float at remote 0x984b474>, 'gsd.hints': 0, 'timer.active_time': <float at remote 0x984b494>, 'timer.total_time': <float at remote 0x984b464>}], dialog=<gtk.Dialog at remote 0x98faaa4>, saved_game_model=<gtk.ListStore at remote 0x98fad24>, sudoku_maker=<SudokuMaker(terminated=False, played=[], batch_siz...(truncated)
swallower.run_dialog(self.dialog)
#19 (unable to read python frame information)
#23 (unable to read python frame information)
#34 (unable to read python frame information)
#37 Frame 0x9420b04, for file /usr/lib/python2.6/site-packages/gnome_sudoku/main.py, line 906, in start_game ()
u = UI()
#40 Frame 0x948e82c, for file /usr/lib/python2.6/site-packages/gnome_sudoku/gnome_sudoku.py, line 22, in start_game (main=<module at remote 0xb771b7f4>)
main.start_game()
The frame numbers correspond to those displayed by GDB's standard
``backtrace`` command.
``py-print``
------------
The ``py-print`` command looks up a Python name and tries to print it.
It looks in locals within the current thread, then globals, then finally
builtins::
(gdb) py-print self
local 'self' = <SwappableArea(running=<gtk.Dialog at remote 0x98faaa4>,
main_page=0) at remote 0x98fa6e4>
(gdb) py-print __name__
global '__name__' = 'gnome_sudoku.dialog_swallower'
(gdb) py-print len
builtin 'len' = <built-in function len>
(gdb) py-print scarlet_pimpernel
'scarlet_pimpernel' not found
If the current C frame corresponds to multiple Python frames, ``py-print``
only considers the first one.
``py-locals``
-------------
The ``py-locals`` command looks up all Python locals within the current
Python frame in the selected thread, and prints their representations::
(gdb) py-locals
self = <SwappableArea(running=<gtk.Dialog at remote 0x98faaa4>,
main_page=0) at remote 0x98fa6e4>
d = <gtk.Dialog at remote 0x98faaa4>
If the current C frame corresponds to multiple Python frames, locals from
all of them will be shown::
(gdb) py-locals
Locals for recursive_function
n = 0
Locals for recursive_function
n = 1
Locals for recursive_function
n = 2
Locals for recursive_function
n = 3
Locals for recursive_function
n = 4
Locals for recursive_function
n = 5
Locals for <module>
Use with GDB commands
=====================
The extension commands complement GDB's built-in commands.
For example, you can use the frame numbers shown by ``py-bt`` with the ``frame``
command to go to a specific frame within the selected thread, like this::
(gdb) py-bt
(output snipped)
#68 Frame 0xaa4560, for file Lib/test/regrtest.py, line 1548, in <module> ()
main()
(gdb) frame 68
#68 0x00000000004cd1e6 in PyEval_EvalFrameEx (f=Frame 0xaa4560, for file Lib/test/regrtest.py, line 1548, in <module> (), throwflag=0) at Python/ceval.c:2665
2665 x = call_function(&sp, oparg);
(gdb) py-list
1543 # Run the tests in a context manager that temporary changes the CWD to a
1544 # temporary and writable directory. If it's not possible to create or
1545 # change the CWD, the original CWD will be used. The original CWD is
1546 # available from test_support.SAVEDCWD.
1547 with test_support.temp_cwd(TESTCWD, quiet=True):
>1548 main()
The ``info threads`` command will give you a list of the threads within the
process, and you can use the ``thread`` command to select a different one::
(gdb) info threads
105 Thread 0x7fffefa18710 (LWP 10260) sem_wait () at ../nptl/sysdeps/unix/sysv/linux/x86_64/sem_wait.S:86
104 Thread 0x7fffdf5fe710 (LWP 10259) sem_wait () at ../nptl/sysdeps/unix/sysv/linux/x86_64/sem_wait.S:86
* 1 Thread 0x7ffff7fe2700 (LWP 10145) 0x00000038e46d73e3 in select () at ../sysdeps/unix/syscall-template.S:82
You can use ``thread apply all COMMAND`` (or ``t a a COMMAND`` for short) to run
a command on all threads. With ``py-bt``, this lets you see what every
thread is doing at the Python level::
(gdb) t a a py-bt
Thread 105 (Thread 0x7fffefa18710 (LWP 10260)):
#5 Frame 0x7fffd00019d0, for file /home/david/coding/python-svn/Lib/threading.py, line 155, in _acquire_restore (self=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=<thread.lock at remote 0x858770>, _RLock__count=1) at remote 0xd7ff40>, count_owner=(1, 140737213728528), count=1, owner=140737213728528)
self.__block.acquire()
#8 Frame 0x7fffac001640, for file /home/david/coding/python-svn/Lib/threading.py, line 269, in wait (self=<_Condition(_Condition__lock=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=<thread.lock at remote 0x858770>, _RLock__count=1) at remote 0xd7ff40>, acquire=<instancemethod at remote 0xd80260>, _is_owned=<instancemethod at remote 0xd80160>, _release_save=<instancemethod at remote 0xd803e0>, release=<instancemethod at remote 0xd802e0>, _acquire_restore=<instancemethod at remote 0xd7ee60>, _Verbose__verbose=False, _Condition__waiters=[]) at remote 0xd7fd10>, timeout=None, waiter=<thread.lock at remote 0x858a90>, saved_state=(1, 140737213728528))
self._acquire_restore(saved_state)
#12 Frame 0x7fffb8001a10, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 348, in f ()
cond.wait()
#16 Frame 0x7fffb8001c40, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 37, in task (tid=140737213728528)
f()
Thread 104 (Thread 0x7fffdf5fe710 (LWP 10259)):
#5 Frame 0x7fffe4001580, for file /home/david/coding/python-svn/Lib/threading.py, line 155, in _acquire_restore (self=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=<thread.lock at remote 0x858770>, _RLock__count=1) at remote 0xd7ff40>, count_owner=(1, 140736940992272), count=1, owner=140736940992272)
self.__block.acquire()
#8 Frame 0x7fffc8002090, for file /home/david/coding/python-svn/Lib/threading.py, line 269, in wait (self=<_Condition(_Condition__lock=<_RLock(_Verbose__verbose=False, _RLock__owner=140737354016512, _RLock__block=<thread.lock at remote 0x858770>, _RLock__count=1) at remote 0xd7ff40>, acquire=<instancemethod at remote 0xd80260>, _is_owned=<instancemethod at remote 0xd80160>, _release_save=<instancemethod at remote 0xd803e0>, release=<instancemethod at remote 0xd802e0>, _acquire_restore=<instancemethod at remote 0xd7ee60>, _Verbose__verbose=False, _Condition__waiters=[]) at remote 0xd7fd10>, timeout=None, waiter=<thread.lock at remote 0x858860>, saved_state=(1, 140736940992272))
self._acquire_restore(saved_state)
#12 Frame 0x7fffac001c90, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 348, in f ()
cond.wait()
#16 Frame 0x7fffac0011c0, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 37, in task (tid=140736940992272)
f()
Thread 1 (Thread 0x7ffff7fe2700 (LWP 10145)):
#5 Frame 0xcb5380, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 16, in _wait ()
time.sleep(0.01)
#8 Frame 0x7fffd00024a0, for file /home/david/coding/python-svn/Lib/test/lock_tests.py, line 378, in _check_notify (self=<ConditionTests(_testMethodName='test_notify', _resultForDoCleanups=<TestResult(_original_stdout=<cStringIO.StringO at remote 0xc191e0>, skipped=[], _mirrorOutput=False, testsRun=39, buffer=False, _original_stderr=<file at remote 0x7ffff7fc6340>, _stdout_buffer=<cStringIO.StringO at remote 0xc9c7f8>, _stderr_buffer=<cStringIO.StringO at remote 0xc9c790>, _moduleSetUpFailed=False, expectedFailures=[], errors=[], _previousTestClass=<type at remote 0x928310>, unexpectedSuccesses=[], failures=[], shouldStop=False, failfast=False) at remote 0xc185a0>, _threads=(0,), _cleanups=[], _type_equality_funcs={<type at remote 0x7eba00>: <instancemethod at remote 0xd750e0>, <type at remote 0x7e7820>: <instancemethod at remote 0xd75160>, <type at remote 0x7e30e0>: <instancemethod at remote 0xd75060>, <type at remote 0x7e7d20>: <instancemethod at remote 0xd751e0>, <type at remote 0x7f19e0...(truncated)
_wait()
View file
@ -13,10 +13,10 @@ Currently, the HOWTOs are:
.. toctree:: .. toctree::
:maxdepth: 1 :maxdepth: 1
pyporting.rst
cporting.rst cporting.rst
curses.rst curses.rst
descriptor.rst descriptor.rst
gdb_helpers.rst
enum.rst enum.rst
functional.rst functional.rst
logging.rst logging.rst
@ -33,4 +33,5 @@ Currently, the HOWTOs are:
annotations.rst annotations.rst
isolating-extensions.rst isolating-extensions.rst
timerfd.rst timerfd.rst
mro.rst
View file
@ -1744,13 +1744,11 @@ to the above, as in the following example::
return self.fmt.format(*self.args) return self.fmt.format(*self.args)
class StyleAdapter(logging.LoggerAdapter): class StyleAdapter(logging.LoggerAdapter):
def __init__(self, logger, extra=None): def log(self, level, msg, /, *args, stacklevel=1, **kwargs):
super().__init__(logger, extra or {})
def log(self, level, msg, /, *args, **kwargs):
if self.isEnabledFor(level): if self.isEnabledFor(level):
msg, kwargs = self.process(msg, kwargs) msg, kwargs = self.process(msg, kwargs)
self.logger._log(level, Message(msg, args), (), **kwargs) self.logger.log(level, Message(msg, args), **kwargs,
stacklevel=stacklevel+1)
logger = StyleAdapter(logging.getLogger(__name__)) logger = StyleAdapter(logging.getLogger(__name__))
@ -1762,7 +1760,7 @@ to the above, as in the following example::
main() main()
The above script should log the message ``Hello, world!`` when run with The above script should log the message ``Hello, world!`` when run with
Python 3.2 or later. Python 3.8 or later.
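For reference, here is a consolidated, runnable sketch of the updated adapter; the ``Message`` helper is reconstructed from the surrounding example and may differ cosmetically from the cookbook's version::

   import logging

   class Message:
       def __init__(self, fmt, args):
           self.fmt = fmt
           self.args = args

       def __str__(self):
           return self.fmt.format(*self.args)

   class StyleAdapter(logging.LoggerAdapter):
       def log(self, level, msg, /, *args, stacklevel=1, **kwargs):
           if self.isEnabledFor(level):
               msg, kwargs = self.process(msg, kwargs)
               self.logger.log(level, Message(msg, args), **kwargs,
                               stacklevel=stacklevel + 1)

   logging.basicConfig(level=logging.DEBUG)
   logger = StyleAdapter(logging.getLogger(__name__))
   logger.debug('Hello, {}!', 'world')    # logs "Hello, world!"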
.. currentmodule:: logging .. currentmodule:: logging
@ -1848,8 +1846,11 @@ the use of a :class:`Filter` does not provide the desired result.
.. _zeromq-handlers: .. _zeromq-handlers:
Subclassing QueueHandler - a ZeroMQ example Subclassing QueueHandler and QueueListener - a ZeroMQ example
------------------------------------------- ------------------------------------------------------------
Subclass ``QueueHandler``
^^^^^^^^^^^^^^^^^^^^^^^^^
You can use a :class:`QueueHandler` subclass to send messages to other kinds You can use a :class:`QueueHandler` subclass to send messages to other kinds
of queues, for example a ZeroMQ 'publish' socket. In the example below, the of queues, for example a ZeroMQ 'publish' socket. In the example below, the
@ -1887,8 +1888,8 @@ data needed by the handler to create the socket::
self.queue.close() self.queue.close()
Subclassing QueueListener - a ZeroMQ example Subclass ``QueueListener``
-------------------------------------------- ^^^^^^^^^^^^^^^^^^^^^^^^^^
You can also subclass :class:`QueueListener` to get messages from other kinds You can also subclass :class:`QueueListener` to get messages from other kinds
of queues, for example a ZeroMQ 'subscribe' socket. Here's an example:: of queues, for example a ZeroMQ 'subscribe' socket. Here's an example::
@ -1905,25 +1906,194 @@ of queues, for example a ZeroMQ 'subscribe' socket. Here's an example::
msg = self.queue.recv_json() msg = self.queue.recv_json()
return logging.makeLogRecord(msg) return logging.makeLogRecord(msg)
.. _pynng-handlers:
.. seealso:: Subclassing QueueHandler and QueueListener - a ``pynng`` example
---------------------------------------------------------------
Module :mod:`logging` In a similar way to the above section, we can implement a listener and handler
API reference for the logging module. using :pypi:`pynng`, which is a Python binding to
`NNG <https://nng.nanomsg.org/>`_, billed as a spiritual successor to ZeroMQ.
The following snippets illustrate -- you can test them in an environment which has
``pynng`` installed. Just for variety, we present the listener first.
Module :mod:`logging.config`
Configuration API for the logging module.
Module :mod:`logging.handlers` Subclass ``QueueListener``
Useful handlers included with the logging module. ^^^^^^^^^^^^^^^^^^^^^^^^^^
:ref:`A basic logging tutorial <logging-basic-tutorial>` .. code-block:: python
:ref:`A more advanced logging tutorial <logging-advanced-tutorial>` # listener.py
import json
import logging
import logging.handlers
import pynng
DEFAULT_ADDR = "tcp://localhost:13232"
interrupted = False
class NNGSocketListener(logging.handlers.QueueListener):
def __init__(self, uri, /, *handlers, **kwargs):
# Have a timeout for interruptability, and open a
# subscriber socket
socket = pynng.Sub0(listen=uri, recv_timeout=500)
# The b'' subscription matches all topics
topics = kwargs.pop('topics', None) or b''
socket.subscribe(topics)
# We treat the socket as a queue
super().__init__(socket, *handlers, **kwargs)
def dequeue(self, block):
data = None
# Keep looping while not interrupted and no data received over the
# socket
while not interrupted:
try:
data = self.queue.recv(block=block)
break
except pynng.Timeout:
pass
except pynng.Closed: # sometimes happens when you hit Ctrl-C
break
if data is None:
return None
# Get the logging event sent from a publisher
event = json.loads(data.decode('utf-8'))
return logging.makeLogRecord(event)
def enqueue_sentinel(self):
# Not used in this implementation, as the socket isn't really a
# queue
pass
logging.getLogger('pynng').propagate = False
listener = NNGSocketListener(DEFAULT_ADDR, logging.StreamHandler(), topics=b'')
listener.start()
print('Press Ctrl-C to stop.')
try:
while True:
pass
except KeyboardInterrupt:
interrupted = True
finally:
listener.stop()
Subclass ``QueueHandler``
^^^^^^^^^^^^^^^^^^^^^^^^^
.. currentmodule:: logging .. currentmodule:: logging
.. code-block:: python
# sender.py
import json
import logging
import logging.handlers
import time
import random
import pynng
DEFAULT_ADDR = "tcp://localhost:13232"
class NNGSocketHandler(logging.handlers.QueueHandler):
def __init__(self, uri):
socket = pynng.Pub0(dial=uri, send_timeout=500)
super().__init__(socket)
def enqueue(self, record):
# Send the record as UTF-8 encoded JSON
d = dict(record.__dict__)
data = json.dumps(d)
self.queue.send(data.encode('utf-8'))
def close(self):
self.queue.close()
logging.getLogger('pynng').propagate = False
handler = NNGSocketHandler(DEFAULT_ADDR)
# Make sure the process ID is in the output
logging.basicConfig(level=logging.DEBUG,
handlers=[logging.StreamHandler(), handler],
format='%(levelname)-8s %(name)10s %(process)6s %(message)s')
levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
logging.CRITICAL)
logger_names = ('myapp', 'myapp.lib1', 'myapp.lib2')
msgno = 1
while True:
# Just randomly select some loggers and levels and log away
level = random.choice(levels)
logger = logging.getLogger(random.choice(logger_names))
logger.log(level, 'Message no. %5d' % msgno)
msgno += 1
delay = random.random() * 2 + 0.5
time.sleep(delay)
You can run the above two snippets in separate command shells. If we run the
listener in one shell and run the sender in two separate shells, we should see
something like the following. In the first sender shell:
.. code-block:: console
$ python sender.py
DEBUG myapp 613 Message no. 1
WARNING myapp.lib2 613 Message no. 2
CRITICAL myapp.lib2 613 Message no. 3
WARNING myapp.lib2 613 Message no. 4
CRITICAL myapp.lib1 613 Message no. 5
DEBUG myapp 613 Message no. 6
CRITICAL myapp.lib1 613 Message no. 7
INFO myapp.lib1 613 Message no. 8
(and so on)
In the second sender shell:
.. code-block:: console
$ python sender.py
INFO myapp.lib2 657 Message no. 1
CRITICAL myapp.lib2 657 Message no. 2
CRITICAL myapp 657 Message no. 3
CRITICAL myapp.lib1 657 Message no. 4
INFO myapp.lib1 657 Message no. 5
WARNING myapp.lib2 657 Message no. 6
CRITICAL myapp 657 Message no. 7
DEBUG myapp.lib1 657 Message no. 8
(and so on)
In the listener shell:
.. code-block:: console
$ python listener.py
Press Ctrl-C to stop.
DEBUG myapp 613 Message no. 1
WARNING myapp.lib2 613 Message no. 2
INFO myapp.lib2 657 Message no. 1
CRITICAL myapp.lib2 613 Message no. 3
CRITICAL myapp.lib2 657 Message no. 2
CRITICAL myapp 657 Message no. 3
WARNING myapp.lib2 613 Message no. 4
CRITICAL myapp.lib1 613 Message no. 5
CRITICAL myapp.lib1 657 Message no. 4
INFO myapp.lib1 657 Message no. 5
DEBUG myapp 613 Message no. 6
WARNING myapp.lib2 657 Message no. 6
CRITICAL myapp 657 Message no. 7
CRITICAL myapp.lib1 613 Message no. 7
INFO myapp.lib1 613 Message no. 8
DEBUG myapp.lib1 657 Message no. 8
(and so on)
As you can see, the logging from the two sender processes is interleaved in the
listener's output.
An example dictionary-based configuration An example dictionary-based configuration
----------------------------------------- -----------------------------------------
@ -1933,30 +2103,28 @@ This dictionary is passed to :func:`~config.dictConfig` to put the configuration
LOGGING = { LOGGING = {
'version': 1, 'version': 1,
'disable_existing_loggers': True, 'disable_existing_loggers': False,
'formatters': { 'formatters': {
'verbose': { 'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
'style': '{',
}, },
'simple': { 'simple': {
'format': '%(levelname)s %(message)s' 'format': '{levelname} {message}',
'style': '{',
}, },
}, },
'filters': { 'filters': {
'special': { 'special': {
'()': 'project.logging.SpecialFilter', '()': 'project.logging.SpecialFilter',
'foo': 'bar', 'foo': 'bar',
} },
}, },
'handlers': { 'handlers': {
'null': { 'console': {
'level':'DEBUG', 'level': 'INFO',
'class':'django.utils.log.NullHandler', 'class': 'logging.StreamHandler',
}, 'formatter': 'simple',
'console':{
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'simple'
}, },
'mail_admins': { 'mail_admins': {
'level': 'ERROR', 'level': 'ERROR',
@ -1966,9 +2134,8 @@ This dictionary is passed to :func:`~config.dictConfig` to put the configuration
}, },
'loggers': { 'loggers': {
'django': { 'django': {
'handlers':['null'], 'handlers': ['console'],
'propagate': True, 'propagate': True,
'level':'INFO',
}, },
'django.request': { 'django.request': {
'handlers': ['mail_admins'], 'handlers': ['mail_admins'],
@ -3408,9 +3575,8 @@ A Qt GUI for logging
A question that comes up from time to time is about how to log to a GUI A question that comes up from time to time is about how to log to a GUI
application. The `Qt <https://www.qt.io/>`_ framework is a popular application. The `Qt <https://www.qt.io/>`_ framework is a popular
cross-platform UI framework with Python bindings using `PySide2 cross-platform UI framework with Python bindings using :pypi:`PySide2`
<https://pypi.org/project/PySide2/>`_ or `PyQt5 or :pypi:`PyQt5` libraries.
<https://pypi.org/project/PyQt5/>`_ libraries.
The following example shows how to log to a Qt GUI. This introduces a simple The following example shows how to log to a Qt GUI. This introduces a simple
``QtHandler`` class which takes a callable, which should be a slot in the main ``QtHandler`` class which takes a callable, which should be a slot in the main
@ -3423,9 +3589,10 @@ The worker thread is implemented using Qt's ``QThread`` class rather than the
:mod:`threading` module, as there are circumstances where one has to use :mod:`threading` module, as there are circumstances where one has to use
``QThread``, which offers better integration with other ``Qt`` components. ``QThread``, which offers better integration with other ``Qt`` components.
The code should work with recent releases of either ``PySide2`` or ``PyQt5``. The code should work with recent releases of any of ``PySide6``, ``PyQt6``,
You should be able to adapt the approach to earlier versions of Qt. Please ``PySide2`` or ``PyQt5``. You should be able to adapt the approach to earlier
refer to the comments in the code snippet for more detailed information. versions of Qt. Please refer to the comments in the code snippet for more
detailed information.
.. code-block:: python3 .. code-block:: python3
@ -3435,7 +3602,17 @@ refer to the comments in the code snippet for more detailed information.
import sys import sys
import time import time
# Deal with minor differences between PySide2 and PyQt5 # Deal with minor differences between different Qt packages
try:
from PySide6 import QtCore, QtGui, QtWidgets
Signal = QtCore.Signal
Slot = QtCore.Slot
except ImportError:
try:
from PyQt6 import QtCore, QtGui, QtWidgets
Signal = QtCore.pyqtSignal
Slot = QtCore.pyqtSlot
except ImportError:
try: try:
from PySide2 import QtCore, QtGui, QtWidgets from PySide2 import QtCore, QtGui, QtWidgets
Signal = QtCore.Signal Signal = QtCore.Signal
@ -3445,7 +3622,6 @@ refer to the comments in the code snippet for more detailed information.
Signal = QtCore.pyqtSignal Signal = QtCore.pyqtSignal
Slot = QtCore.pyqtSlot Slot = QtCore.pyqtSlot
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -3516,8 +3692,14 @@ refer to the comments in the code snippet for more detailed information.
while not QtCore.QThread.currentThread().isInterruptionRequested(): while not QtCore.QThread.currentThread().isInterruptionRequested():
delay = 0.5 + random.random() * 2 delay = 0.5 + random.random() * 2
time.sleep(delay) time.sleep(delay)
try:
if random.random() < 0.1:
raise ValueError('Exception raised: %d' % i)
else:
level = random.choice(LEVELS) level = random.choice(LEVELS)
logger.log(level, 'Message after delay of %3.1f: %d', delay, i, extra=extra) logger.log(level, 'Message after delay of %3.1f: %d', delay, i, extra=extra)
except ValueError as e:
logger.exception('Failed: %s', e, extra=extra)
i += 1 i += 1
# #
@ -3544,7 +3726,10 @@ refer to the comments in the code snippet for more detailed information.
self.textedit = te = QtWidgets.QPlainTextEdit(self) self.textedit = te = QtWidgets.QPlainTextEdit(self)
# Set whatever the default monospace font is for the platform # Set whatever the default monospace font is for the platform
f = QtGui.QFont('nosuchfont') f = QtGui.QFont('nosuchfont')
if hasattr(f, 'Monospace'):
f.setStyleHint(f.Monospace) f.setStyleHint(f.Monospace)
else:
f.setStyleHint(f.StyleHint.Monospace) # for Qt6
te.setFont(f) te.setFont(f)
te.setReadOnly(True) te.setReadOnly(True)
PB = QtWidgets.QPushButton PB = QtWidgets.QPushButton
@ -3631,7 +3816,11 @@ refer to the comments in the code snippet for more detailed information.
app = QtWidgets.QApplication(sys.argv) app = QtWidgets.QApplication(sys.argv)
example = Window(app) example = Window(app)
example.show() example.show()
sys.exit(app.exec_()) if hasattr(app, 'exec'):
rc = app.exec()
else:
rc = app.exec_()
sys.exit(rc)
if __name__=='__main__': if __name__=='__main__':
main() main()
View file
@ -25,10 +25,12 @@ or *severity*.
When to use logging
^^^^^^^^^^^^^^^^^^^

You can access logging functionality by creating a logger via ``logger =
getLogger(__name__)``, and then calling the logger's :meth:`~Logger.debug`,
:meth:`~Logger.info`, :meth:`~Logger.warning`, :meth:`~Logger.error` and
:meth:`~Logger.critical` methods. To determine when to use logging, and to see
which logger methods to use when, see the table below. It states, for each of a
set of common tasks, the best tool to use for that task.
+-------------------------------------+--------------------------------------+
| Task you want to perform            | The best tool for the task           |
@ -37,8 +39,8 @@ states, for each of a set of common tasks, the best tool to use for it.
| usage of a command line script or   |                                      |
| program                             |                                      |
+-------------------------------------+--------------------------------------+
| Report events that occur during     | A logger's :meth:`~Logger.info` (or  |
| normal operation of a program (e.g. | :meth:`~Logger.debug` method for very|
| for status monitoring or fault      | detailed output for diagnostic       |
| investigation)                      | purposes)                            |
+-------------------------------------+--------------------------------------+
@ -47,22 +49,23 @@ states, for each of a set of common tasks, the best tool to use for it.
|                                     | the client application should be     |
|                                     | modified to eliminate the warning    |
|                                     |                                      |
|                                     | A logger's :meth:`~Logger.warning`   |
|                                     | method if there is nothing the client|
|                                     | application can do about the         |
|                                     | situation, but the event should still|
|                                     | be noted                             |
+-------------------------------------+--------------------------------------+
| Report an error regarding a         | Raise an exception                   |
| particular runtime event            |                                      |
+-------------------------------------+--------------------------------------+
| Report suppression of an error      | A logger's :meth:`~Logger.error`,    |
| without raising an exception (e.g.  | :meth:`~Logger.exception` or         |
| error handler in a long-running     | :meth:`~Logger.critical` method as   |
| server process)                     | appropriate for the specific error   |
|                                     | and application domain               |
+-------------------------------------+--------------------------------------+
The logger methods are named after the level or severity of the events
they are used to track. The standard levels and their applicability are
described below (in increasing order of severity):
@ -115,12 +118,18 @@ If you type these lines into a script and run it, you'll see:
   WARNING:root:Watch out!

printed out on the console. The ``INFO`` message doesn't appear because the
default level is ``WARNING``. The printed message includes the indication of the
level and the description of the event provided in the logging call, i.e.
'Watch out!'. The actual output can be formatted quite flexibly if you need
that; formatting options will also be explained later.
Notice that in this example, we use functions directly on the ``logging``
module, like ``logging.debug``, rather than creating a logger and calling
functions on it. These functions operate on the root logger, but can be useful
as they will call :func:`~logging.basicConfig` for you if it has not been called yet, like in
this example. In larger programs you'll usually want to control the logging
configuration explicitly however - so for that reason as well as others, it's
better to create loggers and call their methods.
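A minimal sketch of that recommended pattern (the module layout here is just
illustrative) might look like this::

   import logging

   logger = logging.getLogger(__name__)

   def main():
       # Configure the root logger once, at application startup ...
       logging.basicConfig(level=logging.INFO)
       # ... then log through the module-level logger.
       logger.info('Started')
       logger.info('Finished')

   if __name__ == '__main__':
       main()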
Logging to a file
^^^^^^^^^^^^^^^^^
@ -130,11 +139,12 @@ look at that next. Be sure to try the following in a newly started Python
interpreter, and don't just continue from the session described above::

   import logging
   logger = logging.getLogger(__name__)
   logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG)
   logger.debug('This message should go to the log file')
   logger.info('So should this')
   logger.warning('And this, too')
   logger.error('And non-ASCII stuff, too, like Øresund and Malmö')
.. versionchanged:: 3.9
   The *encoding* argument was added. In earlier Python versions, or if not
@ -148,10 +158,10 @@ messages:
.. code-block:: none

   DEBUG:__main__:This message should go to the log file
   INFO:__main__:So should this
   WARNING:__main__:And this, too
   ERROR:__main__:And non-ASCII stuff, too, like Øresund and Malmö
This example also shows how you can set the logging level which acts as the
threshold for tracking. In this case, because we set the threshold to
@ -180,11 +190,9 @@ following example::
       raise ValueError('Invalid log level: %s' % loglevel)
   logging.basicConfig(level=numeric_level, ...)
The call to :func:`basicConfig` should come *before* any calls to a logger's
methods such as :meth:`~Logger.debug`, :meth:`~Logger.info`, etc. Otherwise,
that logging event may not be handled in the desired manner.
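For instance, in this small sketch the first call produces no output, because
the root logger's default threshold of ``WARNING`` is still in effect, while
the second call is handled as configured::

   import logging

   logger = logging.getLogger(__name__)

   logger.info('Not shown - no configuration has been set up yet')
   logging.basicConfig(level=logging.INFO)
   logger.info('Shown - basicConfig() was called first')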
If you run the above script several times, the messages from successive runs
are appended to the file *example.log*. If you want each run to start afresh,
@ -197,50 +205,6 @@ The output will be the same as before, but the log file is no longer appended
to, so the messages from earlier runs are lost.
Logging from multiple modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If your program consists of multiple modules, here's an example of how you
could organize logging in it::
# myapp.py
import logging
import mylib
def main():
logging.basicConfig(filename='myapp.log', level=logging.INFO)
logging.info('Started')
mylib.do_something()
logging.info('Finished')
if __name__ == '__main__':
main()
::
# mylib.py
import logging
def do_something():
logging.info('Doing something')
If you run *myapp.py*, you should see this in *myapp.log*:
.. code-block:: none
INFO:root:Started
INFO:root:Doing something
INFO:root:Finished
which is hopefully what you were expecting to see. You can generalize this to
multiple modules, using the pattern in *mylib.py*. Note that for this simple
usage pattern, you won't know, by looking in the log file, *where* in your
application your messages came from, apart from looking at the event
description. If you want to track the location of your messages, you'll need
to refer to the documentation beyond the tutorial level -- see
:ref:`logging-advanced-tutorial`.
Logging variable data
^^^^^^^^^^^^^^^^^^^^^
@ -520,7 +484,7 @@ custom handlers) are the following configuration methods:
* The :meth:`~Handler.setLevel` method, just as in logger objects, specifies the
  lowest severity that will be dispatched to the appropriate destination. Why
  are there two :meth:`~Handler.setLevel` methods? The level set in the logger
  determines which severity of messages it will pass to its handlers. The level
  set in each handler determines which messages that handler will send on.
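  A small sketch of how the two thresholds combine (the logger name and file
  name are arbitrary)::

      import logging

      logger = logging.getLogger('app')
      logger.setLevel(logging.DEBUG)        # the logger passes DEBUG and above to its handlers

      console = logging.StreamHandler()
      console.setLevel(logging.WARNING)     # the console emits only WARNING and above

      logfile = logging.FileHandler('app.log')
      logfile.setLevel(logging.DEBUG)       # the file receives everything the logger passes on

      logger.addHandler(console)
      logger.addHandler(logfile)

      logger.debug('written to app.log only')
      logger.warning('written to app.log and shown on the console')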
@ -774,29 +738,29 @@ What happens if no configuration is provided
If no logging configuration is provided, it is possible to have a situation
where a logging event needs to be output, but no handlers can be found to
output the event.

The event is output using a 'handler of last resort', stored in
:data:`lastResort`. This internal handler is not associated with any
logger, and acts like a :class:`~logging.StreamHandler` which writes the
event description message to the current value of ``sys.stderr`` (therefore
respecting any redirections which may be in effect). No formatting is
done on the message - just the bare event description message is printed.
The handler's level is set to ``WARNING``, so all events at this and
greater severities will be output.

.. versionchanged:: 3.2

   For versions of Python prior to 3.2, the behaviour is as follows:

   * If :data:`raiseExceptions` is ``False`` (production mode), the event is
     silently dropped.

   * If :data:`raiseExceptions` is ``True`` (development mode), a message
     'No handlers could be found for logger X.Y.Z' is printed once.

   To obtain the pre-3.2 behaviour, :data:`lastResort` can be set to ``None``.
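A small sketch of this behaviour (the logger name is arbitrary)::

   import logging

   logger = logging.getLogger('myapp')

   # No handlers and no configuration: the last-resort handler writes the
   # bare message to sys.stderr, because WARNING meets its threshold.
   logger.warning('something unexpected happened')

   # With the handler of last resort disabled, the event is dropped or
   # reported as unhandled, depending on raiseExceptions.
   logging.lastResort = None
   logger.warning('this one is no longer printed')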
.. _library-config:
@ -998,7 +962,7 @@ Logged messages are formatted for presentation through instances of the
use with the % operator and a dictionary.

For formatting multiple messages in a batch, instances of
:class:`BufferingFormatter` can be used. In addition to the format
string (which is applied to each message in the batch), there is provision for
header and trailer format strings.
@ -1034,7 +998,8 @@ checks to see if a module-level variable, :data:`raiseExceptions`, is set. If
set, a traceback is printed to :data:`sys.stderr`. If not set, the exception is
swallowed.

.. note::
   The default value of :data:`raiseExceptions` is ``True``. This is
   because during development, you typically want to be notified of any
   exceptions that occur. It's advised that you set :data:`raiseExceptions` to
   ``False`` for production usage.
@ -1072,7 +1037,7 @@ You can write code like this::
                 expensive_func2())

so that if the logger's threshold is set above ``DEBUG``, the calls to
``expensive_func1`` and ``expensive_func2`` are never made.

.. note:: In some cases, :meth:`~Logger.isEnabledFor` can itself be more
   expensive than you'd like (e.g. for deeply nested loggers where an explicit
Doc/howto/mro.rst Normal file
View file
@ -0,0 +1,671 @@
.. _python_2.3_mro:
The Python 2.3 Method Resolution Order
======================================
.. note::
This is a historical document, provided as an appendix to the official
documentation.
The Method Resolution Order discussed here was *introduced* in Python 2.3,
but it is still used in later versions -- including Python 3.
By `Michele Simionato <https://www.phyast.pitt.edu/~micheles/>`__.
:Abstract:
*This document is intended for Python programmers who want to
understand the C3 Method Resolution Order used in Python 2.3.
Although it is not intended for newbies, it is quite pedagogical with
many worked out examples. I am not aware of other publicly available
documents with the same scope, therefore it should be useful.*
Disclaimer:
*I donate this document to the Python Software Foundation, under the
Python 2.3 license. As usual in these circumstances, I warn the
reader that what follows* should *be correct, but I don't give any
warranty. Use it at your own risk and peril!*
Acknowledgments:
*All the people of the Python mailing list who sent me their support.
Paul Foley who pointed out various imprecisions and made me add the
part on local precedence ordering. David Goodger for help with the
formatting in reStructuredText. David Mertz for help with the editing.
Finally, Guido van Rossum who enthusiastically added this document to
the official Python 2.3 home-page.*
The beginning
-------------
*Felix qui potuit rerum cognoscere causas* -- Virgilius
Everything started with a post by Samuele Pedroni to the Python
development mailing list [#]_. In his post, Samuele showed that the
Python 2.2 method resolution order is not monotonic and he proposed to
replace it with the C3 method resolution order. Guido agreed with his
arguments and therefore now Python 2.3 uses C3. The C3 method itself
has nothing to do with Python, since it was invented by people working
on Dylan and it is described in a paper intended for lispers [#]_. The
present paper gives a (hopefully) readable discussion of the C3
algorithm for Pythonistas who want to understand the reasons for the
change.
First of all, let me point out that what I am going to say only applies
to the *new style classes* introduced in Python 2.2: *classic classes*
maintain their old method resolution order, depth first and then left to
right. Therefore, there is no breaking of old code for classic classes;
and even if in principle there could be breaking of code for Python 2.2
new style classes, in practice the cases in which the C3 resolution
order differs from the Python 2.2 method resolution order are so rare
that no real breaking of code is expected. Therefore:
*Don't be scared!*
Moreover, unless you make strong use of multiple inheritance and you
have non-trivial hierarchies, you don't need to understand the C3
algorithm, and you can easily skip this paper. On the other hand, if
you really want to know how multiple inheritance works, then this paper
is for you. The good news is that things are not as complicated as you
might expect.
Let me begin with some basic definitions.
1) Given a class C in a complicated multiple inheritance hierarchy, it
is a non-trivial task to specify the order in which methods are
overridden, i.e. to specify the order of the ancestors of C.
2) The list of the ancestors of a class C, including the class itself,
ordered from the nearest ancestor to the furthest, is called the
class precedence list or the *linearization* of C.
3) The *Method Resolution Order* (MRO) is the set of rules that
construct the linearization. In the Python literature, the idiom
"the MRO of C" is also used as a synonym for the linearization of
the class C.
4) For instance, in the case of single inheritance hierarchy, if C is a
subclass of C1, and C1 is a subclass of C2, then the linearization of
C is simply the list [C, C1 , C2]. However, with multiple
inheritance hierarchies, the construction of the linearization is
more cumbersome, since it is more difficult to construct a
linearization that respects *local precedence ordering* and
*monotonicity*.
5) I will discuss the local precedence ordering later, but I can give
the definition of monotonicity here. A MRO is monotonic when the
following is true: *if C1 precedes C2 in the linearization of C,
then C1 precedes C2 in the linearization of any subclass of C*.
Otherwise, the innocuous operation of deriving a new class could
change the resolution order of methods, potentially introducing very
subtle bugs. Examples where this happens will be shown later.
6) Not all classes admit a linearization. There are cases, in
complicated hierarchies, where it is not possible to derive a class
such that its linearization respects all the desired properties.
Here I give an example of this situation. Consider the hierarchy
>>> O = object
>>> class X(O): pass
>>> class Y(O): pass
>>> class A(X,Y): pass
>>> class B(Y,X): pass
which can be represented with the following inheritance graph, where I
have denoted with O the ``object`` class, which is the beginning of any
hierarchy for new style classes:
.. code-block:: text
-----------
| |
| O |
| / \ |
- X Y /
| / | /
| / |/
A B
\ /
?
In this case, it is not possible to derive a new class C from A and B,
since X precedes Y in A, but Y precedes X in B, therefore the method
resolution order would be ambiguous in C.
Python 2.3 raises an exception in this situation (TypeError: MRO
conflict among bases Y, X) forbidding the naive programmer from creating
ambiguous hierarchies. Python 2.2 instead does not raise an exception,
but chooses an *ad hoc* ordering (CABXYO in this case).
The C3 Method Resolution Order
------------------------------
Let me introduce a few simple notations which will be useful for the
following discussion. I will use the shortcut notation::
C1 C2 ... CN
to indicate the list of classes [C1, C2, ... , CN].
The *head* of the list is its first element::
head = C1
whereas the *tail* is the rest of the list::
tail = C2 ... CN.
I shall also use the notation::
C + (C1 C2 ... CN) = C C1 C2 ... CN
to denote the sum of the lists [C] + [C1, C2, ... ,CN].
Now I can explain how the MRO works in Python 2.3.
Consider a class C in a multiple inheritance hierarchy, with C
inheriting from the base classes B1, B2, ... , BN. We want to
compute the linearization L[C] of the class C. The rule is the
following:
*the linearization of C is the sum of C plus the merge of the
linearizations of the parents and the list of the parents.*
In symbolic notation::
L[C(B1 ... BN)] = C + merge(L[B1] ... L[BN], B1 ... BN)
In particular, if C is the ``object`` class, which has no parents, the
linearization is trivial::
L[object] = object.
However, in general one has to compute the merge according to the following
prescription:
*take the head of the first list, i.e L[B1][0]; if this head is not in
the tail of any of the other lists, then add it to the linearization
of C and remove it from the lists in the merge, otherwise look at the
head of the next list and take it, if it is a good head. Then repeat
the operation until all the classes are removed or it is impossible to
find good heads. In this case, it is impossible to construct the
merge, and Python 2.3 will refuse to create the class C and will raise an
exception.*
This prescription ensures that the merge operation *preserves* the
ordering, if the ordering can be preserved. On the other hand, if the
order cannot be preserved (as in the example of serious order
disagreement discussed above) then the merge cannot be computed.
The computation of the merge is trivial if C has only one parent
(single inheritance); in this case::
L[C(B)] = C + merge(L[B],B) = C + L[B]
However, in the case of multiple inheritance things are more cumbersome
and I don't expect you can understand the rule without a couple of
examples ;-)
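Before looking at the examples, here is a rough sketch of the prescription
above in present-day Python (an illustration only, not the interpreter's
actual implementation)::

    def c3_merge(seqs):
        """Merge linearizations according to the C3 prescription."""
        result = []
        seqs = [list(s) for s in seqs]
        while any(seqs):
            for seq in seqs:
                head = seq[0] if seq else None
                # a good head does not appear in the tail of any other list
                if head is not None and not any(head in s[1:] for s in seqs):
                    break
            else:
                raise TypeError('inconsistent hierarchy: no C3 linearization exists')
            result.append(head)
            for seq in seqs:
                if seq and seq[0] is head:
                    del seq[0]
        return result

    def linearize(cls):
        "L[C] = C + merge(L[B1] ... L[BN], B1 ... BN)"
        return [cls] + c3_merge([linearize(base) for base in cls.__bases__]
                                + [list(cls.__bases__)])

    class X: pass
    class Y: pass
    class A(X, Y): pass

    print(linearize(A) == list(A.__mro__))    # True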
Examples
--------
First example. Consider the following hierarchy:
>>> O = object
>>> class F(O): pass
>>> class E(O): pass
>>> class D(O): pass
>>> class C(D,F): pass
>>> class B(D,E): pass
>>> class A(B,C): pass
In this case the inheritance graph can be drawn as:
.. code-block:: text
6
---
Level 3 | O | (more general)
/ --- \
/ | \ |
/ | \ |
/ | \ |
--- --- --- |
Level 2 3 | D | 4| E | | F | 5 |
--- --- --- |
\ \ _ / | |
\ / \ _ | |
\ / \ | |
--- --- |
Level 1 1 | B | | C | 2 |
--- --- |
\ / |
\ / \ /
---
Level 0 0 | A | (more specialized)
---
The linearizations of O,D,E and F are trivial::
L[O] = O
L[D] = D O
L[E] = E O
L[F] = F O
The linearization of B can be computed as::
L[B] = B + merge(DO, EO, DE)
We see that D is a good head, therefore we take it and we are reduced to
compute ``merge(O,EO,E)``. Now O is not a good head, since it is in the
tail of the sequence EO. In this case the rule says that we have to
skip to the next sequence. Then we see that E is a good head; we take
it and we are reduced to compute ``merge(O,O)`` which gives O. Therefore::
L[B] = B D E O
Using the same procedure one finds::
L[C] = C + merge(DO,FO,DF)
= C + D + merge(O,FO,F)
= C + D + F + merge(O,O)
= C D F O
Now we can compute::
L[A] = A + merge(BDEO,CDFO,BC)
= A + B + merge(DEO,CDFO,C)
= A + B + C + merge(DEO,DFO)
= A + B + C + D + merge(EO,FO)
= A + B + C + D + E + merge(O,FO)
= A + B + C + D + E + F + merge(O,O)
= A B C D E F O
In this example, the linearization is ordered in a pretty nice way
according to the inheritance level, in the sense that lower levels (i.e.
more specialized classes) have higher precedence (see the inheritance
graph). However, this is not the general case.
I leave as an exercise for the reader to compute the linearization for
my second example:
>>> O = object
>>> class F(O): pass
>>> class E(O): pass
>>> class D(O): pass
>>> class C(D,F): pass
>>> class B(E,D): pass
>>> class A(B,C): pass
The only difference with the previous example is the change B(D,E) -->
B(E,D); however even such a little modification completely changes the
ordering of the hierarchy:
.. code-block:: text
6
---
Level 3 | O |
/ --- \
/ | \
/ | \
/ | \
--- --- ---
Level 2 2 | E | 4 | D | | F | 5
--- --- ---
\ / \ /
\ / \ /
\ / \ /
--- ---
Level 1 1 | B | | C | 3
--- ---
\ /
\ /
---
Level 0 0 | A |
---
Notice that the class E, which is in the second level of the hierarchy,
precedes the class C, which is in the first level of the hierarchy, i.e.
E is more specialized than C, even if it is in a higher level.
A lazy programmer can obtain the MRO directly from Python 2.2, since in
this case it coincides with the Python 2.3 linearization. It is enough
to invoke the .mro() method of class A:
>>> A.mro() # doctest: +NORMALIZE_WHITESPACE
[<class 'A'>, <class 'B'>, <class 'E'>,
<class 'C'>, <class 'D'>, <class 'F'>,
<class 'object'>]
Finally, let me consider the example discussed in the first section,
involving a serious order disagreement. In this case, it is
straightforward to compute the linearizations of O, X, Y, A and B:
.. code-block:: text
L[O] = O
L[X] = X O
L[Y] = Y O
L[A] = A X Y O
L[B] = B Y X O
However, it is impossible to compute the linearization for a class C
that inherits from A and B::
L[C] = C + merge(AXYO, BYXO, AB)
= C + A + merge(XYO, BYXO, B)
= C + A + B + merge(XYO, YXO)
At this point we cannot merge the lists XYO and YXO, since X is in the
tail of YXO whereas Y is in the tail of XYO: therefore there are no
good heads and the C3 algorithm stops. Python 2.3 raises an error and
refuses to create the class C.
Bad Method Resolution Orders
----------------------------
A MRO is *bad* when it breaks such fundamental properties as local
precedence ordering and monotonicity. In this section, I will show
that both the MRO for classic classes and the MRO for new style classes
in Python 2.2 are bad.
It is easier to start with the local precedence ordering. Consider the
following example:
>>> F=type('Food',(),{'remember2buy':'spam'})
>>> E=type('Eggs',(F,),{'remember2buy':'eggs'})
>>> G=type('GoodFood',(F,E),{}) # under Python 2.3 this is an error! # doctest: +SKIP
with inheritance diagram
.. code-block:: text
O
|
(buy spam) F
| \
| E (buy eggs)
| /
G
(buy eggs or spam ?)
We see that class G inherits from F and E, with F *before* E: therefore
we would expect the attribute *G.remember2buy* to be inherited by
*F.remember2buy* and not by *E.remember2buy*: nevertheless Python 2.2
gives
>>> G.remember2buy # doctest: +SKIP
'eggs'
This is a breaking of local precedence ordering since the order in the
local precedence list, i.e. the list of the parents of G, is not
preserved in the Python 2.2 linearization of G::
L[G,P22]= G E F object # F *follows* E
One could argue that the reason why F follows E in the Python 2.2
linearization is that F is less specialized than E, since F is the
superclass of E; nevertheless the breaking of local precedence ordering
is quite non-intuitive and error prone. This is particularly true since
it is different from old style classes:
>>> class F: remember2buy='spam'
>>> class E(F): remember2buy='eggs'
>>> class G(F,E): pass # doctest: +SKIP
>>> G.remember2buy # doctest: +SKIP
'spam'
In this case the MRO is GFEF and the local precedence ordering is
preserved.
As a general rule, hierarchies such as the previous one should be
avoided, since it is unclear if F should override E or vice versa.
Python 2.3 solves the ambiguity by raising an exception in the creation
of class G, effectively stopping the programmer from generating
ambiguous hierarchies. The reason for that is that the C3 algorithm
fails when the merge::
merge(FO,EFO,FE)
cannot be computed, because F is in the tail of EFO and E is in the tail
of FE.
The real solution is to design a non-ambiguous hierarchy, i.e. to derive
G from E and F (the more specific first) and not from F and E; in this
case the MRO is GEF without any doubt.
.. code-block:: text
O
|
F (spam)
/ |
(eggs) E |
\ |
G
(eggs, no doubt)
Python 2.3 forces the programmer to write good hierarchies (or, at
least, less error-prone ones).
On a related note, let me point out that the Python 2.3 algorithm is
smart enough to recognize obvious mistakes, as the duplication of
classes in the list of parents:
>>> class A(object): pass
>>> class C(A,A): pass # error
Traceback (most recent call last):
File "<stdin>", line 1, in ?
TypeError: duplicate base class A
Python 2.2 (both for classic classes and new style classes) in this
situation, would not raise any exception.
Finally, I would like to point out two lessons we have learned from this
example:
1. despite the name, the MRO determines the resolution order of
attributes, not only of methods;
2. the default food for Pythonistas is spam ! (but you already knew
that ;-)
Having discussed the issue of local precedence ordering, let me now
consider the issue of monotonicity. My goal is to show that neither the
MRO for classic classes nor that for Python 2.2 new style classes is
monotonic.
To prove that the MRO for classic classes is non-monotonic is rather
trivial, it is enough to look at the diamond diagram:
.. code-block:: text
C
/ \
/ \
A B
\ /
\ /
D
One easily discerns the inconsistency::
L[B,P21] = B C # B precedes C : B's methods win
L[D,P21] = D A C B C # B follows C : C's methods win!
On the other hand, there are no problems with the Python 2.2 and 2.3
MROs, they give both::
L[D] = D A B C
Guido points out in his essay [#]_ that the classic MRO is not so bad in
practice, since one can typically avoid diamonds for classic classes.
But all new style classes inherit from ``object``, therefore diamonds are
unavoidable and inconsistencies show up in every multiple inheritance
graph.
The MRO of Python 2.2 makes breaking monotonicity difficult, but not
impossible. The following example, originally provided by Samuele
Pedroni, shows that the MRO of Python 2.2 is non-monotonic:
>>> class A(object): pass
>>> class B(object): pass
>>> class C(object): pass
>>> class D(object): pass
>>> class E(object): pass
>>> class K1(A,B,C): pass
>>> class K2(D,B,E): pass
>>> class K3(D,A): pass
>>> class Z(K1,K2,K3): pass
Here are the linearizations according to the C3 MRO (the reader should
verify these linearizations as an exercise and draw the inheritance
diagram ;-) ::
L[A] = A O
L[B] = B O
L[C] = C O
L[D] = D O
L[E] = E O
L[K1]= K1 A B C O
L[K2]= K2 D B E O
L[K3]= K3 D A O
L[Z] = Z K1 K2 K3 D A B C E O
Python 2.2 gives exactly the same linearizations for A, B, C, D, E, K1,
K2 and K3, but a different linearization for Z::
L[Z,P22] = Z K1 K3 A K2 D B C E O
It is clear that this linearization is *wrong*, since A comes before D
whereas in the linearization of K3 A comes *after* D. In other words, in
K3 methods derived by D override methods derived by A, but in Z, which
still is a subclass of K3, methods derived by A override methods derived
by D! This is a violation of monotonicity. Moreover, the Python 2.2
linearization of Z is also inconsistent with local precedence ordering,
since the local precedence list of the class Z is [K1, K2, K3] (K2
precedes K3), whereas in the linearization of Z K2 *follows* K3. These
problems explain why the 2.2 rule has been dismissed in favor of the C3
rule.
The end
-------
This section is for the impatient reader, who skipped all the previous
sections and jumped immediately to the end. This section is for the
lazy programmer too, who didn't want to exercise her/his brain.
Finally, it is for the programmer with some hubris, otherwise s/he would
not be reading a paper on the C3 method resolution order in multiple
inheritance hierarchies ;-) These three virtues taken all together (and
*not* separately) deserve a prize: the prize is a short Python 2.2
script that allows you to compute the 2.3 MRO without risk to your
brain. Simply change the last line to play with the various examples I
have discussed in this paper.::
#<mro.py>
"""C3 algorithm by Samuele Pedroni (with readability enhanced by me)."""
class __metaclass__(type):
"All classes are metamagically modified to be nicely printed"
__repr__ = lambda cls: cls.__name__
class ex_2:
"Serious order disagreement" #From Guido
class O: pass
class X(O): pass
class Y(O): pass
class A(X,Y): pass
class B(Y,X): pass
try:
class Z(A,B): pass #creates Z(A,B) in Python 2.2
except TypeError:
pass # Z(A,B) cannot be created in Python 2.3
class ex_5:
"My first example"
class O: pass
class F(O): pass
class E(O): pass
class D(O): pass
class C(D,F): pass
class B(D,E): pass
class A(B,C): pass
class ex_6:
"My second example"
class O: pass
class F(O): pass
class E(O): pass
class D(O): pass
class C(D,F): pass
class B(E,D): pass
class A(B,C): pass
class ex_9:
"Difference between Python 2.2 MRO and C3" #From Samuele
class O: pass
class A(O): pass
class B(O): pass
class C(O): pass
class D(O): pass
class E(O): pass
class K1(A,B,C): pass
class K2(D,B,E): pass
class K3(D,A): pass
class Z(K1,K2,K3): pass
def merge(seqs):
print '\n\nCPL[%s]=%s' % (seqs[0][0],seqs),
res = []; i=0
while 1:
nonemptyseqs=[seq for seq in seqs if seq]
if not nonemptyseqs: return res
i+=1; print '\n',i,'round: candidates...',
for seq in nonemptyseqs: # find merge candidates among seq heads
cand = seq[0]; print ' ',cand,
nothead=[s for s in nonemptyseqs if cand in s[1:]]
if nothead: cand=None #reject candidate
else: break
if not cand: raise "Inconsistent hierarchy"
res.append(cand)
for seq in nonemptyseqs: # remove cand
if seq[0] == cand: del seq[0]
def mro(C):
"Compute the class precedence list (mro) according to C3"
return merge([[C]]+map(mro,C.__bases__)+[list(C.__bases__)])
def print_mro(C):
print '\nMRO[%s]=%s' % (C,mro(C))
print '\nP22 MRO[%s]=%s' % (C,C.mro())
print_mro(ex_9.Z)
#</mro.py>
That's all folks,
enjoy !
Resources
---------
.. [#] The thread on python-dev started by Samuele Pedroni:
https://mail.python.org/pipermail/python-dev/2002-October/029035.html
.. [#] The paper *A Monotonic Superclass Linearization for Dylan*:
https://doi.org/10.1145/236337.236343
.. [#] Guido van Rossum's essay, *Unifying types and classes in Python 2.2*:
https://web.archive.org/web/20140210194412/http://www.python.org/download/releases/2.2.2/descrintro
View file
@ -1,3 +1,5 @@
:orphan:
.. _pyporting-howto:

*************************************
@ -6,423 +8,30 @@ How to port Python 2 Code to Python 3
:author: Brett Cannon

Python 2 reached its official end-of-life at the start of 2020. This means
that no new bug reports, fixes, or changes will be made to Python 2 - it's
no longer supported: see :pep:`373` and
`status of Python versions <https://devguide.python.org/versions>`_.

If you are looking to port an extension module instead of pure Python code,
please see :ref:`cporting-howto`.

The archived python-porting_ mailing list may contain some useful guidance.

Since Python 3.13 the original porting guide was discontinued.
You can find the old guide in the
`archive <https://docs.python.org/3.12/howto/pyporting.html>`_.

Third-party guides
==================

There are also multiple third-party guides that might be useful:

- `Guide by Fedora <https://portingguide.readthedocs.io>`_
- `PyCon 2020 tutorial <https://www.youtube.com/watch?v=JgIgEjASOlk>`_
- `Guide by DigitalOcean <https://www.digitalocean.com/community/tutorials/how-to-port-python-2-code-to-python-3>`_
- `Guide by ActiveState <https://www.activestate.com/blog/how-to-migrate-python-2-applications-to-python-3>`_

.. topic:: Abstract

   Python 2 reached its official end-of-life at the start of 2020. This means
   that no new bug reports, fixes, or changes will be made to Python 2 - it's
   no longer supported.

   This guide is intended to provide you with a path to Python 3 for your
   code, that includes compatibility with Python 2 as a first step.

   If you are looking to port an extension module instead of pure Python code,
   please see :ref:`cporting-howto`.

   The archived python-porting_ mailing list may contain some useful guidance.

The Short Explanation
=====================

To achieve Python 2/3 compatibility in a single code base, the basic steps
are:
#. Only worry about supporting Python 2.7
#. Make sure you have good test coverage (coverage.py_ can help;
``python -m pip install coverage``)
#. Learn the differences between Python 2 and 3
#. Use Futurize_ (or Modernize_) to update your code (e.g. ``python -m pip install future``)
#. Use Pylint_ to help make sure you don't regress on your Python 3 support
(``python -m pip install pylint``)
#. Use caniusepython3_ to find out which of your dependencies are blocking your
use of Python 3 (``python -m pip install caniusepython3``)
#. Once your dependencies are no longer blocking you, use continuous integration
to make sure you stay compatible with Python 2 and 3 (tox_ can help test
against multiple versions of Python; ``python -m pip install tox``)
#. Consider using optional :term:`static type checking <static type checker>`
to make sure your type usage
works in both Python 2 and 3 (e.g. use mypy_ to check your typing under both
Python 2 and Python 3; ``python -m pip install mypy``).
.. note::
Note: Using ``python -m pip install`` guarantees that the ``pip`` you invoke
is the one installed for the Python currently in use, whether it be
a system-wide ``pip`` or one installed within a
:ref:`virtual environment <tut-venv>`.
Details
=======
Even if other factors - say, dependencies over which you have no control -
still require you to support Python 2, that does not prevent you taking the
step of including Python 3 support.
Most changes required to support Python 3 lead to cleaner code using newer
practices even in Python 2 code.
Different versions of Python 2
------------------------------
Ideally, your code should be compatible with Python 2.7, which was the
last supported version of Python 2.
Some of the tools mentioned in this guide will not work with Python 2.6.
If absolutely necessary, the six_ project can help you support Python 2.5 and
3 simultaneously. Do realize, though, that nearly all the projects listed in
this guide will not be available to you.
If you are able to skip Python 2.5 and older, the required changes to your
code will be minimal. At worst you will have to use a function instead of a
method in some instances or have to import a function instead of using a
built-in one.
Make sure you specify the proper version support in your ``setup.py`` file
--------------------------------------------------------------------------
In your ``setup.py`` file you should have the proper `trove classifier`_
specifying what versions of Python you support. As your project does not support
Python 3 yet you should at least have
``Programming Language :: Python :: 2 :: Only`` specified. Ideally you should
also specify each major/minor version of Python that you do support, e.g.
``Programming Language :: Python :: 2.7``.
Have good test coverage
-----------------------
Once you have your code supporting the oldest version of Python 2 you want it
to, you will want to make sure your test suite has good coverage. A good rule of
thumb is that you want to be confident enough in your test suite that any
failures that appear after having tools rewrite your code are actual bugs in the
tools and not in your code. If you want a number to aim for, try to get over 80%
coverage (and don't feel bad if you find it hard to get better than 90%
coverage). If you don't already have a tool to measure test coverage then
coverage.py_ is recommended.
Be aware of the differences between Python 2 and 3
--------------------------------------------------
Once you have your code well-tested you are ready to begin porting your code to
Python 3! But to fully understand how your code is going to change and what
you want to look out for while you code, you will want to learn what changes
Python 3 makes in terms of Python 2.
Some resources for understanding the differences and their implications for you
code:
* the :ref:`"What's New" <whatsnew-index>` doc for each release of Python 3
* the `Porting to Python 3`_ book (which is free online)
* the handy `cheat sheet`_ from the Python-Future project.
Update your code
----------------
There are tools available that can port your code automatically.
Futurize_ does its best to make Python 3 idioms and practices exist in Python
2, e.g. backporting the ``bytes`` type from Python 3 so that you have
semantic parity between the major versions of Python. This is the better
approach for most cases.
Modernize_, on the other hand, is more conservative and targets a Python 2/3
subset of Python, directly relying on six_ to help provide compatibility.
A good approach is to run the tool over your test suite first and visually
inspect the diff to make sure the transformation is accurate. After you have
transformed your test suite and verified that all the tests still pass as
expected, then you can transform your application code knowing that any tests
which fail is a translation failure.
Unfortunately the tools can't automate everything to make your code work under
Python 3, and you will also need to read the tools' documentation in case some
options you need are turned off by default.
Key issues to be aware of and check for:
Division
++++++++
In Python 3, ``5 / 2 == 2.5`` and not ``2`` as it was in Python 2; all
division between ``int`` values result in a ``float``. This change has
actually been planned since Python 2.2 which was released in 2002. Since then
users have been encouraged to add ``from __future__ import division`` to any
and all files which use the ``/`` and ``//`` operators or to be running the
interpreter with the ``-Q`` flag. If you have not been doing this then you
will need to go through your code and do two things:
#. Add ``from __future__ import division`` to your files
#. Update any division operator as necessary to either use ``//`` to use floor
division or continue using ``/`` and expect a float
The reason that ``/`` isn't simply translated to ``//`` automatically is that if
an object defines a ``__truediv__`` method but not ``__floordiv__`` then your
code would begin to fail (e.g. a user-defined class that uses ``/`` to
signify some operation but not ``//`` for the same thing or at all).
Text versus binary data
+++++++++++++++++++++++
In Python 2 you could use the ``str`` type for both text and binary data.
Unfortunately this confluence of two different concepts could lead to brittle
code which sometimes worked for either kind of data, sometimes not. It also
could lead to confusing APIs if people didn't explicitly state that something
that accepted ``str`` accepted either text or binary data instead of one
specific type. This complicated the situation especially for anyone supporting
multiple languages as APIs wouldn't bother explicitly supporting ``unicode``
when they claimed text data support.
Python 3 made text and binary data distinct types that cannot simply be mixed
together. For any code that deals only with text or only binary data, this
separation doesn't pose an issue. But for code that has to deal with both, it
does mean you might have to now care about when you are using text compared
to binary data, which is why this cannot be entirely automated.
Decide which APIs take text and which take binary (it is **highly** recommended
you don't design APIs that can take both due to the difficulty of keeping the
code working; as stated earlier it is difficult to do well). In Python 2 this
means making sure the APIs that take text can work with ``unicode`` and those
that work with binary data work with the ``bytes`` type from Python 3
(which is a subset of ``str`` in Python 2 and acts as an alias for ``bytes``
type in Python 2). Usually the biggest issue is realizing which methods exist
on which types in Python 2 and 3 simultaneously (for text that's ``unicode``
in Python 2 and ``str`` in Python 3, for binary that's ``str``/``bytes`` in
Python 2 and ``bytes`` in Python 3).
The following table lists the **unique** methods of each data type across
Python 2 and 3 (e.g., the ``decode()`` method is usable on the equivalent binary
data type in either Python 2 or 3, but it can't be used by the textual data
type consistently between Python 2 and 3 because ``str`` in Python 3 doesn't
have the method). Do note that as of Python 3.5 the ``__mod__`` method was
added to the bytes type.
======================== =====================
**Text data** **Binary data**
------------------------ ---------------------
\ decode
------------------------ ---------------------
encode
------------------------ ---------------------
format
------------------------ ---------------------
isdecimal
------------------------ ---------------------
isnumeric
======================== =====================
Making the distinction easier to handle can be accomplished by encoding and
decoding between binary data and text at the edge of your code. This means that
when you receive text in binary data, you should immediately decode it. And if
your code needs to send text as binary data then encode it as late as possible.
This allows your code to work with only text internally and thus eliminates
having to keep track of what type of data you are working with.
The next issue is making sure you know whether the string literals in your code
represent text or binary data. You should add a ``b`` prefix to any
literal that presents binary data. For text you should add a ``u`` prefix to
the text literal. (There is a :mod:`__future__` import to force all unspecified
literals to be Unicode, but usage has shown it isn't as effective as adding a
``b`` or ``u`` prefix to all literals explicitly)
You also need to be careful about opening files. Possibly you have not always
bothered to add the ``b`` mode when opening a binary file (e.g., ``rb`` for
binary reading). Under Python 3, binary files and text files are clearly
distinct and mutually incompatible; see the :mod:`io` module for details.
Therefore, you **must** make a decision of whether a file will be used for
binary access (allowing binary data to be read and/or written) or textual access
(allowing text data to be read and/or written). You should also use :func:`io.open`
for opening files instead of the built-in :func:`open` function as the :mod:`io`
module is consistent from Python 2 to 3 while the built-in :func:`open` function
is not (in Python 3 it's actually :func:`io.open`). Do not bother with the
outdated practice of using :func:`codecs.open` as that's only necessary for
keeping compatibility with Python 2.5.
The constructors of both ``str`` and ``bytes`` have different semantics for the
same arguments between Python 2 and 3. Passing an integer to ``bytes`` in Python 2
will give you the string representation of the integer: ``bytes(3) == '3'``.
But in Python 3, an integer argument to ``bytes`` will give you a bytes object
as long as the integer specified, filled with null bytes:
``bytes(3) == b'\x00\x00\x00'``. A similar worry is necessary when passing a
bytes object to ``str``. In Python 2 you just get the bytes object back:
``str(b'3') == b'3'``. But in Python 3 you get the string representation of the
bytes object: ``str(b'3') == "b'3'"``.
Finally, the indexing of binary data requires careful handling (slicing does
**not** require any special handling). In Python 2,
``b'123'[1] == b'2'`` while in Python 3 ``b'123'[1] == 50``. Because binary data
is simply a collection of binary numbers, Python 3 returns the integer value for
the byte you index on. But in Python 2 because ``bytes == str``, indexing
returns a one-item slice of bytes. The six_ project has a function
named ``six.indexbytes()`` which will return an integer like in Python 3:
``six.indexbytes(b'123', 1)``.
To summarize:
#. Decide which of your APIs take text and which take binary data
#. Make sure that your code that works with text also works with ``unicode`` and
code for binary data works with ``bytes`` in Python 2 (see the table above
for what methods you cannot use for each type)
#. Mark all binary literals with a ``b`` prefix, textual literals with a ``u``
prefix
#. Decode binary data to text as soon as possible, encode text as binary data as
late as possible
#. Open files using :func:`io.open` and make sure to specify the ``b`` mode when
appropriate
#. Be careful when indexing into binary data
Use feature detection instead of version detection
++++++++++++++++++++++++++++++++++++++++++++++++++
Inevitably you will have code that has to choose what to do based on what
version of Python is running. The best way to do this is with feature detection
of whether the version of Python you're running under supports what you need.
If for some reason that doesn't work then you should make the version check be
against Python 2 and not Python 3. To help explain this, let's look at an
example.
Let's pretend that you need access to a feature of :mod:`importlib` that
is available in Python's standard library since Python 3.3 and available for
Python 2 through importlib2_ on PyPI. You might be tempted to write code to
access e.g. the :mod:`importlib.abc` module by doing the following::
import sys
if sys.version_info[0] == 3:
from importlib import abc
else:
from importlib2 import abc
The problem with this code is what happens when Python 4 comes out? It would
be better to treat Python 2 as the exceptional case instead of Python 3 and
assume that future Python versions will be more compatible with Python 3 than
Python 2::
import sys
if sys.version_info[0] > 2:
from importlib import abc
else:
from importlib2 import abc
The best solution, though, is to do no version detection at all and instead rely
on feature detection. That avoids any potential issues of getting the version
detection wrong and helps keep you future-compatible::
try:
from importlib import abc
except ImportError:
from importlib2 import abc
Prevent compatibility regressions
---------------------------------
Once you have fully translated your code to be compatible with Python 3, you
will want to make sure your code doesn't regress and stop working under
Python 3. This is especially true if you have a dependency which is blocking you
from actually running under Python 3 at the moment.
To help with staying compatible, any new modules you create should have
at least the following block of code at the top of it::
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
You can also run Python 2 with the ``-3`` flag to be warned about various
compatibility issues your code triggers during execution. If you turn warnings
into errors with ``-Werror`` then you can make sure that you don't accidentally
miss a warning.
You can also use the Pylint_ project and its ``--py3k`` flag to lint your code
to receive warnings when your code begins to deviate from Python 3
compatibility. This also prevents you from having to run Modernize_ or Futurize_
over your code regularly to catch compatibility regressions. This does require
you only support Python 2.7 and Python 3.4 or newer as that is Pylint's
minimum Python version support.
Check which dependencies block your transition
----------------------------------------------
**After** you have made your code compatible with Python 3 you should begin to
care about whether your dependencies have also been ported. The caniusepython3_
project was created to help you determine which projects
-- directly or indirectly -- are blocking you from supporting Python 3. There
is both a command-line tool as well as a web interface at
https://caniusepython3.com.
The project also provides code which you can integrate into your test suite so
that you will have a failing test when you no longer have dependencies blocking
you from using Python 3. This allows you to avoid having to manually check your
dependencies and to be notified quickly when you can start running on Python 3.
Update your ``setup.py`` file to denote Python 3 compatibility
--------------------------------------------------------------
Once your code works under Python 3, you should update the classifiers in
your ``setup.py`` to contain ``Programming Language :: Python :: 3`` and to not
specify sole Python 2 support. This will tell anyone using your code that you
support Python 2 **and** 3. Ideally you will also want to add classifiers for
each major/minor version of Python you now support.
Use continuous integration to stay compatible
---------------------------------------------
Once you are able to fully run under Python 3 you will want to make sure your
code always works under both Python 2 and 3. Probably the best tool for running
your tests under multiple Python interpreters is tox_. You can then integrate
tox with your continuous integration system so that you never accidentally break
Python 2 or 3 support.
You may also want to use the ``-bb`` flag with the Python 3 interpreter to
trigger an exception when you are comparing bytes to strings or bytes to an int
(the latter is available starting in Python 3.5). By default type-differing
comparisons simply return ``False``, but if you made a mistake in your
separation of text/binary data handling or indexing on bytes you wouldn't easily
find the mistake. This flag will raise an exception when these kinds of
comparisons occur, making the mistake much easier to track down.
Consider using optional static type checking
--------------------------------------------
Another way to help port your code is to use a :term:`static type checker` like
mypy_ or pytype_ on your code. These tools can be used to analyze your code as
if it's being run under Python 2, then you can run the tool a second time as if
your code is running under Python 3. By running a static type checker twice like
this you can discover if you're e.g. misusing binary data type in one version
of Python compared to another. If you add optional type hints to your code you
can also explicitly state whether your APIs use textual or binary data, helping
to make sure everything functions as expected in both versions of Python.
.. _caniusepython3: https://pypi.org/project/caniusepython3
.. _cheat sheet: https://python-future.org/compatible_idioms.html
.. _coverage.py: https://pypi.org/project/coverage
.. _Futurize: https://python-future.org/automatic_conversion.html
.. _importlib2: https://pypi.org/project/importlib2
.. _Modernize: https://python-modernize.readthedocs.io/
.. _mypy: https://mypy-lang.org/
.. _Porting to Python 3: http://python3porting.com/
.. _Pylint: https://pypi.org/project/pylint
.. _Python 3 Q & A: https://ncoghlan-devs-python-notes.readthedocs.io/en/latest/python3/questions_and_answers.html
.. _pytype: https://github.com/google/pytype
.. _python-future: https://python-future.org/
.. _python-porting: https://mail.python.org/pipermail/python-porting/ .. _python-porting: https://mail.python.org/pipermail/python-porting/
.. _six: https://pypi.org/project/six
.. _tox: https://pypi.org/project/tox
.. _trove classifier: https://pypi.org/classifiers
.. _Why Python 3 exists: https://snarky.ca/why-python-3-exists
View file
@ -1,10 +1,9 @@
.. _sortinghowto:

Sorting Techniques
******************

:Author: Andrew Dalke and Raymond Hettinger


Python lists have a built-in :meth:`list.sort` method that modifies the list
@ -56,7 +55,7 @@ For example, here's a case-insensitive string comparison:
.. doctest::

    >>> sorted("This is a test string from Andrew".split(), key=str.casefold)
    ['a', 'Andrew', 'from', 'is', 'string', 'test', 'This']

The value of the *key* parameter should be a function (or other callable) that
@ -97,10 +96,14 @@ The same technique works for objects with named attributes. For example:
    >>> sorted(student_objects, key=lambda student: student.age)   # sort by age
    [('dave', 'B', 10), ('jane', 'B', 12), ('john', 'A', 15)]
Objects with named attributes can be made by a regular class as shown
above, or they can be instances of :class:`~dataclasses.dataclass` or
a :term:`named tuple`.
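For example (a sketch with a hypothetical ``Student`` dataclass, mirroring the
tuples used earlier), the same key-function idiom applies unchanged::

    from dataclasses import dataclass

    @dataclass
    class Student:
        name: str
        grade: str
        age: int

    roster = [Student('john', 'A', 15), Student('jane', 'B', 12), Student('dave', 'B', 10)]
    sorted(roster, key=lambda student: student.age)   # youngest first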
Operator Module Functions and Partial Function Evaluation
==========================================================

The :term:`key function` patterns shown above are very common, so Python provides
convenience functions to make accessor functions easier and faster. The
:mod:`operator` module has :func:`~operator.itemgetter`,
:func:`~operator.attrgetter`, and a :func:`~operator.methodcaller` function.
@ -128,6 +131,24 @@ sort by *grade* then by *age*:
>>> sorted(student_objects, key=attrgetter('grade', 'age')) >>> sorted(student_objects, key=attrgetter('grade', 'age'))
[('john', 'A', 15), ('dave', 'B', 10), ('jane', 'B', 12)] [('john', 'A', 15), ('dave', 'B', 10), ('jane', 'B', 12)]
The :mod:`functools` module provides another helpful tool for making
key-functions. The :func:`~functools.partial` function can reduce the
`arity <https://en.wikipedia.org/wiki/Arity>`_ of a multi-argument
function making it suitable for use as a key-function.
.. doctest::
>>> from functools import partial
>>> from unicodedata import normalize
>>> names = 'Zoë Åbjørn Núñez Élana Zeke Abe Nubia Eloise'.split()
>>> sorted(names, key=partial(normalize, 'NFD'))
['Abe', 'Åbjørn', 'Eloise', 'Élana', 'Nubia', 'Núñez', 'Zeke', 'Zoë']
>>> sorted(names, key=partial(normalize, 'NFC'))
['Abe', 'Eloise', 'Nubia', 'Núñez', 'Zeke', 'Zoë', 'Åbjørn', 'Élana']
Ascending and Descending Ascending and Descending
======================== ========================
@ -200,6 +221,8 @@ This idiom is called Decorate-Sort-Undecorate after its three steps:
For example, to sort the student data by *grade* using the DSU approach: For example, to sort the student data by *grade* using the DSU approach:
.. doctest::
>>> decorated = [(student.grade, i, student) for i, student in enumerate(student_objects)] >>> decorated = [(student.grade, i, student) for i, student in enumerate(student_objects)]
>>> decorated.sort() >>> decorated.sort()
>>> [student for grade, i, student in decorated] # undecorate >>> [student for grade, i, student in decorated] # undecorate
@ -282,7 +305,11 @@ Odds and Ends
[('dave', 'B', 10), ('jane', 'B', 12), ('john', 'A', 15)] [('dave', 'B', 10), ('jane', 'B', 12), ('john', 'A', 15)]
However, note that ``<`` can fall back to using :meth:`~object.__gt__` if
:meth:`~object.__lt__` is not implemented (see :func:`object.__lt__`
for details on the mechanics). To avoid surprises, :pep:`8`
recommends that all six comparison methods be implemented.
The :func:`~functools.total_ordering` decorator is provided to make that
task easier.
* Key functions need not depend directly on the objects being sorted. A key * Key functions need not depend directly on the objects being sorted. A key
function can also access external resources. For instance, if the student grades function can also access external resources. For instance, if the student grades
@ -295,3 +322,24 @@ Odds and Ends
>>> newgrades = {'john': 'F', 'jane':'A', 'dave': 'C'} >>> newgrades = {'john': 'F', 'jane':'A', 'dave': 'C'}
>>> sorted(students, key=newgrades.__getitem__) >>> sorted(students, key=newgrades.__getitem__)
['jane', 'dave', 'john'] ['jane', 'dave', 'john']
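Returning to the note above about implementing the comparison methods: a
minimal sketch (not from the HOWTO) of letting :func:`~functools.total_ordering`
fill in the remaining methods once ``__eq__`` and ``__lt__`` are defined::

    from functools import total_ordering

    @total_ordering
    class Version:
        def __init__(self, major, minor):
            self.major, self.minor = major, minor

        def __eq__(self, other):
            return (self.major, self.minor) == (other.major, other.minor)

        def __lt__(self, other):
            return (self.major, self.minor) < (other.major, other.minor)

    # __le__, __gt__ and __ge__ are supplied by the decorator.
    sorted([Version(3, 12), Version(3, 9), Version(3, 11)])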
Partial Sorts
=============
Some applications require only some of the data to be ordered. The standard
library provides several tools that do less work than a full sort:
* :func:`min` and :func:`max` return the smallest and largest values,
respectively. These functions make a single pass over the input data and
require almost no auxiliary memory.
* :func:`heapq.nsmallest` and :func:`heapq.nlargest` return
the *n* smallest and largest values, respectively. These functions
make a single pass over the data keeping only *n* elements in memory
at a time. For values of *n* that are small relative to the number of
inputs, these functions make far fewer comparisons than a full sort.
* :func:`heapq.heappush` and :func:`heapq.heappop` create and maintain a
partially sorted arrangement of data that keeps the smallest element
at position ``0``. These functions are suitable for implementing
priority queues which are commonly used for task scheduling.
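A quick illustration of the first two tools (the scores are invented for the
example):

.. doctest::

    >>> import heapq
    >>> scores = [81, 97, 54, 88, 62, 73]
    >>> min(scores), max(scores)
    (54, 97)
    >>> heapq.nsmallest(3, scores)
    [54, 62, 73]
    >>> heapq.nlargest(2, scores)
    [97, 88]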

View file

@ -392,16 +392,16 @@ info and geturl
=============== ===============
The response returned by urlopen (or the :exc:`~urllib.error.HTTPError` instance) has two The response returned by urlopen (or the :exc:`~urllib.error.HTTPError` instance) has two
useful methods :meth:`info` and :meth:`geturl` and is defined in the module useful methods :meth:`!info` and :meth:`!geturl` and is defined in the module
:mod:`urllib.response`.. :mod:`urllib.response`.
**geturl** - this returns the real URL of the page fetched. This is useful * **geturl** - this returns the real URL of the page fetched. This is useful
because ``urlopen`` (or the opener object used) may have followed a because ``urlopen`` (or the opener object used) may have followed a
redirect. The URL of the page fetched may not be the same as the URL requested. redirect. The URL of the page fetched may not be the same as the URL requested.
**info** - this returns a dictionary-like object that describes the page * **info** - this returns a dictionary-like object that describes the page
fetched, particularly the headers sent by the server. It is currently an fetched, particularly the headers sent by the server. It is currently an
:class:`http.client.HTTPMessage` instance. :class:`http.client.HTTPMessage` instance.
Typical headers include 'Content-length', 'Content-type', and so on. See the Typical headers include 'Content-length', 'Content-type', and so on. See the
`Quick Reference to HTTP Headers <https://jkorpela.fi/http.html>`_ `Quick Reference to HTTP Headers <https://jkorpela.fi/http.html>`_
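Putting the two together (a minimal sketch; the URL is only a placeholder)::

    from urllib.request import urlopen

    with urlopen('http://www.example.com/') as response:
        print(response.geturl())                 # final URL after any redirects
        print(response.info()['Content-Type'])   # header lookup on the HTTPMessage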
@ -507,7 +507,7 @@ than the URL you pass to .add_password() will also match. ::
In the above example we only supplied our ``HTTPBasicAuthHandler`` to In the above example we only supplied our ``HTTPBasicAuthHandler`` to
``build_opener``. By default openers have the handlers for normal situations ``build_opener``. By default openers have the handlers for normal situations
-- ``ProxyHandler`` (if a proxy setting such as an :envvar:`http_proxy` -- ``ProxyHandler`` (if a proxy setting such as an :envvar:`!http_proxy`
environment variable is set), ``UnknownHandler``, ``HTTPHandler``, environment variable is set), ``UnknownHandler``, ``HTTPHandler``,
``HTTPDefaultErrorHandler``, ``HTTPRedirectHandler``, ``FTPHandler``, ``HTTPDefaultErrorHandler``, ``HTTPRedirectHandler``, ``FTPHandler``,
``FileHandler``, ``DataHandler``, ``HTTPErrorProcessor``. ``FileHandler``, ``DataHandler``, ``HTTPErrorProcessor``.

View file

@ -0,0 +1,8 @@
.. include for modules that don't work on WASM or iOS
.. availability:: not WASI, not iOS.
This module does not work or is not available on WebAssembly platforms, or
on iOS. See :ref:`wasm-availability` for more information on WASM
availability; see :ref:`iOS-availability` for more information on iOS
availability.

View file

@ -1,7 +1,6 @@
.. include for modules that don't work on WASM .. include for modules that don't work on WASM
.. availability:: not Emscripten, not WASI. .. availability:: not WASI.
This module does not work or is not available on WebAssembly platforms This module does not work or is not available on WebAssembly. See
``wasm32-emscripten`` and ``wasm32-wasi``. See
:ref:`wasm-availability` for more information. :ref:`wasm-availability` for more information.

View file

@ -8,20 +8,68 @@
-------------- --------------
:mod:`__future__` is a real module, and serves three purposes: Imports of the form ``from __future__ import feature`` are called
:ref:`future statements <future>`. These are special-cased by the Python compiler
to allow the use of new Python features in modules containing the future statement
before the release in which the feature becomes standard.
While these future statements are given additional special meaning by the
Python compiler, they are still executed like any other import statement and
the :mod:`__future__` module exists and is handled by the import system the same way
any other Python module would be. This design serves three purposes:
* To avoid confusing existing tools that analyze import statements and expect to * To avoid confusing existing tools that analyze import statements and expect to
find the modules they're importing. find the modules they're importing.
* To ensure that :ref:`future statements <future>` run under releases prior to
2.1 at least yield runtime exceptions (the import of :mod:`__future__` will
fail, because there was no module of that name prior to 2.1).
* To document when incompatible changes were introduced, and when they will be * To document when incompatible changes were introduced, and when they will be
--- or were --- made mandatory. This is a form of executable documentation, and --- or were --- made mandatory. This is a form of executable documentation, and
can be inspected programmatically via importing :mod:`__future__` and examining can be inspected programmatically via importing :mod:`__future__` and examining
its contents. its contents.
* To ensure that :ref:`future statements <future>` run under releases prior to
Python 2.1 at least yield runtime exceptions (the import of :mod:`__future__`
will fail, because there was no module of that name prior to 2.1).
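As an illustration of the "executable documentation" point above, the release
information recorded for each feature can be read back at runtime (a short
sketch; the values shown correspond to the table below)::

    >>> import __future__
    >>> __future__.division.getMandatoryRelease()
    (3, 0, 0, 'alpha', 0)
    >>> __future__.annotations.getOptionalRelease()
    (3, 7, 0, 'beta', 1)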
Module Contents
---------------
No feature description will ever be deleted from :mod:`__future__`. Since its
introduction in Python 2.1 the following features have found their way into the
language using this mechanism:
+------------------+-------------+--------------+---------------------------------------------+
| feature | optional in | mandatory in | effect |
+==================+=============+==============+=============================================+
| nested_scopes | 2.1.0b1 | 2.2 | :pep:`227`: |
| | | | *Statically Nested Scopes* |
+------------------+-------------+--------------+---------------------------------------------+
| generators | 2.2.0a1 | 2.3 | :pep:`255`: |
| | | | *Simple Generators* |
+------------------+-------------+--------------+---------------------------------------------+
| division | 2.2.0a2 | 3.0 | :pep:`238`: |
| | | | *Changing the Division Operator* |
+------------------+-------------+--------------+---------------------------------------------+
| absolute_import | 2.5.0a1 | 3.0 | :pep:`328`: |
| | | | *Imports: Multi-Line and Absolute/Relative* |
+------------------+-------------+--------------+---------------------------------------------+
| with_statement | 2.5.0a1 | 2.6 | :pep:`343`: |
| | | | *The "with" Statement* |
+------------------+-------------+--------------+---------------------------------------------+
| print_function | 2.6.0a2 | 3.0 | :pep:`3105`: |
| | | | *Make print a function* |
+------------------+-------------+--------------+---------------------------------------------+
| unicode_literals | 2.6.0a2 | 3.0 | :pep:`3112`: |
| | | | *Bytes literals in Python 3000* |
+------------------+-------------+--------------+---------------------------------------------+
| generator_stop | 3.5.0b1 | 3.7 | :pep:`479`: |
| | | | *StopIteration handling inside generators* |
+------------------+-------------+--------------+---------------------------------------------+
| annotations | 3.7.0b1 | TBD [1]_ | :pep:`563`: |
| | | | *Postponed evaluation of annotations* |
+------------------+-------------+--------------+---------------------------------------------+
.. XXX Adding a new entry? Remember to update simple_stmts.rst, too.
.. _future-classes: .. _future-classes:
.. class:: _Feature .. class:: _Feature
@ -65,43 +113,6 @@
dynamically compiled code. This flag is stored in the :attr:`_Feature.compiler_flag` dynamically compiled code. This flag is stored in the :attr:`_Feature.compiler_flag`
attribute on :class:`_Feature` instances. attribute on :class:`_Feature` instances.
No feature description will ever be deleted from :mod:`__future__`. Since its
introduction in Python 2.1 the following features have found their way into the
language using this mechanism:
+------------------+-------------+--------------+---------------------------------------------+
| feature | optional in | mandatory in | effect |
+==================+=============+==============+=============================================+
| nested_scopes | 2.1.0b1 | 2.2 | :pep:`227`: |
| | | | *Statically Nested Scopes* |
+------------------+-------------+--------------+---------------------------------------------+
| generators | 2.2.0a1 | 2.3 | :pep:`255`: |
| | | | *Simple Generators* |
+------------------+-------------+--------------+---------------------------------------------+
| division | 2.2.0a2 | 3.0 | :pep:`238`: |
| | | | *Changing the Division Operator* |
+------------------+-------------+--------------+---------------------------------------------+
| absolute_import | 2.5.0a1 | 3.0 | :pep:`328`: |
| | | | *Imports: Multi-Line and Absolute/Relative* |
+------------------+-------------+--------------+---------------------------------------------+
| with_statement | 2.5.0a1 | 2.6 | :pep:`343`: |
| | | | *The "with" Statement* |
+------------------+-------------+--------------+---------------------------------------------+
| print_function | 2.6.0a2 | 3.0 | :pep:`3105`: |
| | | | *Make print a function* |
+------------------+-------------+--------------+---------------------------------------------+
| unicode_literals | 2.6.0a2 | 3.0 | :pep:`3112`: |
| | | | *Bytes literals in Python 3000* |
+------------------+-------------+--------------+---------------------------------------------+
| generator_stop | 3.5.0b1 | 3.7 | :pep:`479`: |
| | | | *StopIteration handling inside generators* |
+------------------+-------------+--------------+---------------------------------------------+
| annotations | 3.7.0b1 | TBD [1]_ | :pep:`563`: |
| | | | *Postponed evaluation of annotations* |
+------------------+-------------+--------------+---------------------------------------------+
.. XXX Adding a new entry? Remember to update simple_stmts.rst, too.
.. [1] .. [1]
``from __future__ import annotations`` was previously scheduled to ``from __future__ import annotations`` was previously scheduled to
become mandatory in Python 3.10, but the Python Steering Council become mandatory in Python 3.10, but the Python Steering Council
@ -115,3 +126,6 @@ language using this mechanism:
:ref:`future` :ref:`future`
How the compiler treats future imports. How the compiler treats future imports.
:pep:`236` - Back to the __future__
The original proposal for the __future__ mechanism.

View file

@ -101,11 +101,11 @@ a helper class :class:`ABC` to alternatively define ABCs through inheritance:
subclass of the ABC. (This class method is called from the subclass of the ABC. (This class method is called from the
:meth:`~class.__subclasscheck__` method of the ABC.) :meth:`~class.__subclasscheck__` method of the ABC.)
This method should return ``True``, ``False`` or ``NotImplemented``. If This method should return ``True``, ``False`` or :data:`NotImplemented`. If
it returns ``True``, the *subclass* is considered a subclass of this ABC. it returns ``True``, the *subclass* is considered a subclass of this ABC.
If it returns ``False``, the *subclass* is not considered a subclass of If it returns ``False``, the *subclass* is not considered a subclass of
this ABC, even if it would normally be one. If it returns this ABC, even if it would normally be one. If it returns
``NotImplemented``, the subclass check is continued with the usual :data:`!NotImplemented`, the subclass check is continued with the usual
mechanism. mechanism.
.. XXX explain the "usual mechanism" .. XXX explain the "usual mechanism"

View file

@ -745,7 +745,7 @@ The add_argument() method
.. method:: ArgumentParser.add_argument(name or flags..., [action], [nargs], \ .. method:: ArgumentParser.add_argument(name or flags..., [action], [nargs], \
[const], [default], [type], [choices], [required], \ [const], [default], [type], [choices], [required], \
[help], [metavar], [dest]) [help], [metavar], [dest], [deprecated])
Define how a single command-line argument should be parsed. Each parameter Define how a single command-line argument should be parsed. Each parameter
has its own more detailed description below, but in short they are: has its own more detailed description below, but in short they are:
@ -777,6 +777,8 @@ The add_argument() method
* dest_ - The name of the attribute to be added to the object returned by * dest_ - The name of the attribute to be added to the object returned by
:meth:`parse_args`. :meth:`parse_args`.
* deprecated_ - Whether or not use of the argument is deprecated.
The following sections describe how each of these are used. The following sections describe how each of these are used.
@ -1439,6 +1441,34 @@ behavior::
>>> parser.parse_args('--foo XXX'.split()) >>> parser.parse_args('--foo XXX'.split())
Namespace(bar='XXX') Namespace(bar='XXX')
.. _deprecated:
deprecated
^^^^^^^^^^
During a project's lifetime, some arguments may need to be removed from the
command line. Before removing them, you should inform
your users that the arguments are deprecated and will be removed.
The ``deprecated`` keyword argument of
:meth:`~ArgumentParser.add_argument`, which defaults to ``False``,
specifies whether the argument is deprecated and will be removed
in the future.
For arguments, if ``deprecated`` is ``True``, then a warning will be
printed to standard error when the argument is used::
>>> import argparse
>>> parser = argparse.ArgumentParser(prog='snake.py')
>>> parser.add_argument('--legs', default=0, type=int, deprecated=True)
>>> parser.parse_args([])
Namespace(legs=0)
>>> parser.parse_args(['--legs', '4']) # doctest: +SKIP
snake.py: warning: option '--legs' is deprecated
Namespace(legs=4)
.. versionchanged:: 3.13
Action classes Action classes
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
@ -1842,7 +1872,8 @@ Sub-commands
{foo,bar} additional help {foo,bar} additional help
Furthermore, ``add_parser`` supports an additional ``aliases`` argument, Furthermore, :meth:`~_SubParsersAction.add_parser` supports an additional
*aliases* argument,
which allows multiple strings to refer to the same subparser. This example, which allows multiple strings to refer to the same subparser. This example,
like ``svn``, aliases ``co`` as a shorthand for ``checkout``:: like ``svn``, aliases ``co`` as a shorthand for ``checkout``::
@ -1853,6 +1884,20 @@ Sub-commands
>>> parser.parse_args(['co', 'bar']) >>> parser.parse_args(['co', 'bar'])
Namespace(foo='bar') Namespace(foo='bar')
:meth:`~_SubParsersAction.add_parser` also supports an additional
*deprecated* argument, which allows deprecating the subparser.
>>> import argparse
>>> parser = argparse.ArgumentParser(prog='chicken.py')
>>> subparsers = parser.add_subparsers()
>>> run = subparsers.add_parser('run')
>>> fly = subparsers.add_parser('fly', deprecated=True)
>>> parser.parse_args(['fly']) # doctest: +SKIP
chicken.py: warning: command 'fly' is deprecated
Namespace()
.. versionadded:: 3.13
One particularly effective way of handling sub-commands is to combine the use One particularly effective way of handling sub-commands is to combine the use
of the :meth:`add_subparsers` method with calls to :meth:`set_defaults` so of the :meth:`add_subparsers` method with calls to :meth:`set_defaults` so
that each subparser knows which Python function it should execute. For that each subparser knows which Python function it should execute. For
@ -1936,8 +1981,8 @@ FileType objects
>>> parser.parse_args(['-']) >>> parser.parse_args(['-'])
Namespace(infile=<_io.TextIOWrapper name='<stdin>' encoding='UTF-8'>) Namespace(infile=<_io.TextIOWrapper name='<stdin>' encoding='UTF-8'>)
.. versionadded:: 3.4 .. versionchanged:: 3.4
The *encodings* and *errors* keyword arguments. Added the *encodings* and *errors* parameters.
Argument groups Argument groups

View file

@ -79,14 +79,16 @@ The module defines the following type:
.. class:: array(typecode[, initializer]) .. class:: array(typecode[, initializer])
A new array whose items are restricted by *typecode*, and initialized A new array whose items are restricted by *typecode*, and initialized
from the optional *initializer* value, which must be a list, a from the optional *initializer* value, which must be a :class:`bytes`
:term:`bytes-like object`, or iterable over elements of the or :class:`bytearray` object, a Unicode string, or iterable over elements
appropriate type. of the appropriate type.
If given a list or string, the initializer is passed to the new array's If given a :class:`bytes` or :class:`bytearray` object, the initializer
:meth:`fromlist`, :meth:`frombytes`, or :meth:`fromunicode` method (see below) is passed to the new array's :meth:`frombytes` method;
to add initial items to the array. Otherwise, the iterable initializer is if given a Unicode string, the initializer is passed to the
passed to the :meth:`extend` method. :meth:`fromunicode` method;
otherwise, the initializer's iterator is passed to the :meth:`extend` method
to add initial items to the array.
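A short sketch of the three initializer paths just described::

    from array import array

    array('b', [1, 2, 3])            # iterable of ints   -> extend()
    array('w', 'hello')              # Unicode string     -> fromunicode()
    array('H', bytes([1, 0, 2, 0]))  # bytes/bytearray    -> frombytes()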
Array objects support the ordinary sequence operations of indexing, slicing, Array objects support the ordinary sequence operations of indexing, slicing,
concatenation, and multiplication. When using slice assignment, the assigned concatenation, and multiplication. When using slice assignment, the assigned
@ -152,10 +154,11 @@ The module defines the following type:
must be the right type to be appended to the array. must be the right type to be appended to the array.
.. method:: frombytes(s) .. method:: frombytes(buffer)
Appends items from the string, interpreting the string as an array of machine Appends items from the :term:`bytes-like object`, interpreting
values (as if it had been read from a file using the :meth:`fromfile` method). its content as an array of machine values (as if it had been read
from a file using the :meth:`fromfile` method).
.. versionadded:: 3.2 .. versionadded:: 3.2
:meth:`!fromstring` is renamed to :meth:`frombytes` for clarity. :meth:`!fromstring` is renamed to :meth:`frombytes` for clarity.
@ -177,7 +180,7 @@ The module defines the following type:
.. method:: fromunicode(s) .. method:: fromunicode(s)
Extends this array with data from the given unicode string. Extends this array with data from the given Unicode string.
The array must have type code ``'u'`` or ``'w'``; otherwise a :exc:`ValueError` is raised. The array must have type code ``'u'`` or ``'w'``; otherwise a :exc:`ValueError` is raised.
Use ``array.frombytes(unicodestring.encode(enc))`` to append Unicode data to an Use ``array.frombytes(unicodestring.encode(enc))`` to append Unicode data to an
array of some other type. array of some other type.
@ -212,6 +215,13 @@ The module defines the following type:
Remove the first occurrence of *x* from the array. Remove the first occurrence of *x* from the array.
.. method:: clear()
Remove all elements from the array.
.. versionadded:: 3.13
.. method:: reverse() .. method:: reverse()
Reverse the order of the items in the array. Reverse the order of the items in the array.
@ -239,24 +249,27 @@ The module defines the following type:
.. method:: tounicode() .. method:: tounicode()
Convert the array to a unicode string. The array must have a type ``'u'`` or ``'w'``; Convert the array to a Unicode string. The array must have a type ``'u'`` or ``'w'``;
otherwise a :exc:`ValueError` is raised. Use ``array.tobytes().decode(enc)`` to otherwise a :exc:`ValueError` is raised. Use ``array.tobytes().decode(enc)`` to
obtain a unicode string from an array of some other type. obtain a Unicode string from an array of some other type.
When an array object is printed or converted to a string, it is represented as The string representation of array objects has the form
``array(typecode, initializer)``. The *initializer* is omitted if the array is ``array(typecode, initializer)``.
empty, otherwise it is a string if the *typecode* is ``'u'`` or ``'w'``, The *initializer* is omitted if the array is empty, otherwise it is
otherwise it is a list of numbers. a Unicode string if the *typecode* is ``'u'`` or ``'w'``, otherwise it is
The string is guaranteed to be able to be converted back to an a list of numbers.
The string representation is guaranteed to be able to be converted back to an
array with the same type and value using :func:`eval`, so long as the array with the same type and value using :func:`eval`, so long as the
:class:`~array.array` class has been imported using ``from array import array``. :class:`~array.array` class has been imported using ``from array import array``.
Variables ``inf`` and ``nan`` must also be defined if it contains
corresponding floating point values.
Examples:: Examples::
array('l') array('l')
array('w', 'hello \u2641') array('w', 'hello \u2641')
array('l', [1, 2, 3, 4, 5]) array('l', [1, 2, 3, 4, 5])
array('d', [1.0, 2.0, 3.14]) array('d', [1.0, 2.0, 3.14, -inf, nan])
.. seealso:: .. seealso::
@ -266,4 +279,3 @@ Examples::
`NumPy <https://numpy.org/>`_ `NumPy <https://numpy.org/>`_
The NumPy package defines another array type. The NumPy package defines another array type.

View file

@ -103,20 +103,15 @@ Node classes
For example, to create and populate an :class:`ast.UnaryOp` node, you could For example, to create and populate an :class:`ast.UnaryOp` node, you could
use :: use ::
node = ast.UnaryOp()
node.op = ast.USub()
node.operand = ast.Constant()
node.operand.value = 5
node.operand.lineno = 0
node.operand.col_offset = 0
node.lineno = 0
node.col_offset = 0
or the more compact ::
node = ast.UnaryOp(ast.USub(), ast.Constant(5, lineno=0, col_offset=0), node = ast.UnaryOp(ast.USub(), ast.Constant(5, lineno=0, col_offset=0),
lineno=0, col_offset=0) lineno=0, col_offset=0)
If a field that is optional in the grammar is omitted from the constructor,
it defaults to ``None``. If a list field is omitted, it defaults to the empty
list. If any other field is omitted, a :exc:`DeprecationWarning` is raised
and the AST node will not have this field. In Python 3.15, this condition will
raise an error.
.. versionchanged:: 3.8 .. versionchanged:: 3.8
Class :class:`ast.Constant` is now used for all constants. Class :class:`ast.Constant` is now used for all constants.
@ -140,6 +135,14 @@ Node classes
In the meantime, instantiating them will return an instance of In the meantime, instantiating them will return an instance of
a different class. a different class.
.. deprecated-removed:: 3.13 3.15
Previous versions of Python allowed the creation of AST nodes that were missing
required fields. Similarly, AST node constructors allowed arbitrary keyword
arguments that were set as attributes of the AST node, even if they did not
match any of the fields of the AST node. This behavior is deprecated and will
be removed in Python 3.15.
.. note:: .. note::
The descriptions of the specific node classes displayed here The descriptions of the specific node classes displayed here
were initially adapted from the fantastic `Green Tree were initially adapted from the fantastic `Green Tree
@ -170,8 +173,7 @@ Root nodes
Assign( Assign(
targets=[ targets=[
Name(id='x', ctx=Store())], Name(id='x', ctx=Store())],
value=Constant(value=1))], value=Constant(value=1))])
type_ignores=[])
.. class:: Expression(body) .. class:: Expression(body)
@ -299,8 +301,7 @@ Literals
value=Call( value=Call(
func=Name(id='sin', ctx=Load()), func=Name(id='sin', ctx=Load()),
args=[ args=[
Name(id='a', ctx=Load())], Name(id='a', ctx=Load())]),
keywords=[]),
conversion=-1, conversion=-1,
format_spec=JoinedStr( format_spec=JoinedStr(
values=[ values=[
@ -395,8 +396,7 @@ Variables
Module( Module(
body=[ body=[
Expr( Expr(
value=Name(id='a', ctx=Load()))], value=Name(id='a', ctx=Load()))])
type_ignores=[])
>>> print(ast.dump(ast.parse('a = 1'), indent=4)) >>> print(ast.dump(ast.parse('a = 1'), indent=4))
Module( Module(
@ -404,16 +404,14 @@ Variables
Assign( Assign(
targets=[ targets=[
Name(id='a', ctx=Store())], Name(id='a', ctx=Store())],
value=Constant(value=1))], value=Constant(value=1))])
type_ignores=[])
>>> print(ast.dump(ast.parse('del a'), indent=4)) >>> print(ast.dump(ast.parse('del a'), indent=4))
Module( Module(
body=[ body=[
Delete( Delete(
targets=[ targets=[
Name(id='a', ctx=Del())])], Name(id='a', ctx=Del())])])
type_ignores=[])
.. class:: Starred(value, ctx) .. class:: Starred(value, ctx)
@ -436,8 +434,7 @@ Variables
value=Name(id='b', ctx=Store()), value=Name(id='b', ctx=Store()),
ctx=Store())], ctx=Store())],
ctx=Store())], ctx=Store())],
value=Name(id='it', ctx=Load()))], value=Name(id='it', ctx=Load()))])
type_ignores=[])
.. _ast-expressions: .. _ast-expressions:
@ -460,8 +457,7 @@ Expressions
Expr( Expr(
value=UnaryOp( value=UnaryOp(
op=USub(), op=USub(),
operand=Name(id='a', ctx=Load())))], operand=Name(id='a', ctx=Load())))])
type_ignores=[])
.. class:: UnaryOp(op, operand) .. class:: UnaryOp(op, operand)
@ -726,7 +722,10 @@ Comprehensions
.. doctest:: .. doctest::
>>> print(ast.dump(ast.parse('[x for x in numbers]', mode='eval'), indent=4)) >>> print(ast.dump(
... ast.parse('[x for x in numbers]', mode='eval'),
... indent=4,
... ))
Expression( Expression(
body=ListComp( body=ListComp(
elt=Name(id='x', ctx=Load()), elt=Name(id='x', ctx=Load()),
@ -734,9 +733,11 @@ Comprehensions
comprehension( comprehension(
target=Name(id='x', ctx=Store()), target=Name(id='x', ctx=Store()),
iter=Name(id='numbers', ctx=Load()), iter=Name(id='numbers', ctx=Load()),
ifs=[],
is_async=0)])) is_async=0)]))
>>> print(ast.dump(ast.parse('{x: x**2 for x in numbers}', mode='eval'), indent=4)) >>> print(ast.dump(
... ast.parse('{x: x**2 for x in numbers}', mode='eval'),
... indent=4,
... ))
Expression( Expression(
body=DictComp( body=DictComp(
key=Name(id='x', ctx=Load()), key=Name(id='x', ctx=Load()),
@ -748,9 +749,11 @@ Comprehensions
comprehension( comprehension(
target=Name(id='x', ctx=Store()), target=Name(id='x', ctx=Store()),
iter=Name(id='numbers', ctx=Load()), iter=Name(id='numbers', ctx=Load()),
ifs=[],
is_async=0)])) is_async=0)]))
>>> print(ast.dump(ast.parse('{x for x in numbers}', mode='eval'), indent=4)) >>> print(ast.dump(
... ast.parse('{x for x in numbers}', mode='eval'),
... indent=4,
... ))
Expression( Expression(
body=SetComp( body=SetComp(
elt=Name(id='x', ctx=Load()), elt=Name(id='x', ctx=Load()),
@ -758,7 +761,6 @@ Comprehensions
comprehension( comprehension(
target=Name(id='x', ctx=Store()), target=Name(id='x', ctx=Store()),
iter=Name(id='numbers', ctx=Load()), iter=Name(id='numbers', ctx=Load()),
ifs=[],
is_async=0)])) is_async=0)]))
@ -781,18 +783,15 @@ Comprehensions
elt=Call( elt=Call(
func=Name(id='ord', ctx=Load()), func=Name(id='ord', ctx=Load()),
args=[ args=[
Name(id='c', ctx=Load())], Name(id='c', ctx=Load())]),
keywords=[]),
generators=[ generators=[
comprehension( comprehension(
target=Name(id='line', ctx=Store()), target=Name(id='line', ctx=Store()),
iter=Name(id='file', ctx=Load()), iter=Name(id='file', ctx=Load()),
ifs=[],
is_async=0), is_async=0),
comprehension( comprehension(
target=Name(id='c', ctx=Store()), target=Name(id='c', ctx=Store()),
iter=Name(id='line', ctx=Load()), iter=Name(id='line', ctx=Load()),
ifs=[],
is_async=0)])) is_async=0)]))
>>> print(ast.dump(ast.parse('(n**2 for n in it if n>5 if n<10)', mode='eval'), >>> print(ast.dump(ast.parse('(n**2 for n in it if n>5 if n<10)', mode='eval'),
@ -831,7 +830,6 @@ Comprehensions
comprehension( comprehension(
target=Name(id='i', ctx=Store()), target=Name(id='i', ctx=Store()),
iter=Name(id='soc', ctx=Load()), iter=Name(id='soc', ctx=Load()),
ifs=[],
is_async=1)])) is_async=1)]))
@ -861,8 +859,7 @@ Statements
targets=[ targets=[
Name(id='a', ctx=Store()), Name(id='a', ctx=Store()),
Name(id='b', ctx=Store())], Name(id='b', ctx=Store())],
value=Constant(value=1))], value=Constant(value=1))])
type_ignores=[])
>>> print(ast.dump(ast.parse('a,b = c'), indent=4)) # Unpacking >>> print(ast.dump(ast.parse('a,b = c'), indent=4)) # Unpacking
Module( Module(
@ -874,8 +871,7 @@ Statements
Name(id='a', ctx=Store()), Name(id='a', ctx=Store()),
Name(id='b', ctx=Store())], Name(id='b', ctx=Store())],
ctx=Store())], ctx=Store())],
value=Name(id='c', ctx=Load()))], value=Name(id='c', ctx=Load()))])
type_ignores=[])
.. class:: AnnAssign(target, annotation, value, simple) .. class:: AnnAssign(target, annotation, value, simple)
@ -895,8 +891,7 @@ Statements
AnnAssign( AnnAssign(
target=Name(id='c', ctx=Store()), target=Name(id='c', ctx=Store()),
annotation=Name(id='int', ctx=Load()), annotation=Name(id='int', ctx=Load()),
simple=1)], simple=1)])
type_ignores=[])
>>> print(ast.dump(ast.parse('(a): int = 1'), indent=4)) # Annotation with parenthesis >>> print(ast.dump(ast.parse('(a): int = 1'), indent=4)) # Annotation with parenthesis
Module( Module(
@ -905,8 +900,7 @@ Statements
target=Name(id='a', ctx=Store()), target=Name(id='a', ctx=Store()),
annotation=Name(id='int', ctx=Load()), annotation=Name(id='int', ctx=Load()),
value=Constant(value=1), value=Constant(value=1),
simple=0)], simple=0)])
type_ignores=[])
>>> print(ast.dump(ast.parse('a.b: int'), indent=4)) # Attribute annotation >>> print(ast.dump(ast.parse('a.b: int'), indent=4)) # Attribute annotation
Module( Module(
@ -917,8 +911,7 @@ Statements
attr='b', attr='b',
ctx=Store()), ctx=Store()),
annotation=Name(id='int', ctx=Load()), annotation=Name(id='int', ctx=Load()),
simple=0)], simple=0)])
type_ignores=[])
>>> print(ast.dump(ast.parse('a[1]: int'), indent=4)) # Subscript annotation >>> print(ast.dump(ast.parse('a[1]: int'), indent=4)) # Subscript annotation
Module( Module(
@ -929,8 +922,7 @@ Statements
slice=Constant(value=1), slice=Constant(value=1),
ctx=Store()), ctx=Store()),
annotation=Name(id='int', ctx=Load()), annotation=Name(id='int', ctx=Load()),
simple=0)], simple=0)])
type_ignores=[])
.. class:: AugAssign(target, op, value) .. class:: AugAssign(target, op, value)
@ -951,8 +943,7 @@ Statements
AugAssign( AugAssign(
target=Name(id='x', ctx=Store()), target=Name(id='x', ctx=Store()),
op=Add(), op=Add(),
value=Constant(value=2))], value=Constant(value=2))])
type_ignores=[])
.. class:: Raise(exc, cause) .. class:: Raise(exc, cause)
@ -968,8 +959,7 @@ Statements
body=[ body=[
Raise( Raise(
exc=Name(id='x', ctx=Load()), exc=Name(id='x', ctx=Load()),
cause=Name(id='y', ctx=Load()))], cause=Name(id='y', ctx=Load()))])
type_ignores=[])
.. class:: Assert(test, msg) .. class:: Assert(test, msg)
@ -984,8 +974,7 @@ Statements
body=[ body=[
Assert( Assert(
test=Name(id='x', ctx=Load()), test=Name(id='x', ctx=Load()),
msg=Name(id='y', ctx=Load()))], msg=Name(id='y', ctx=Load()))])
type_ignores=[])
.. class:: Delete(targets) .. class:: Delete(targets)
@ -1002,8 +991,7 @@ Statements
targets=[ targets=[
Name(id='x', ctx=Del()), Name(id='x', ctx=Del()),
Name(id='y', ctx=Del()), Name(id='y', ctx=Del()),
Name(id='z', ctx=Del())])], Name(id='z', ctx=Del())])])
type_ignores=[])
.. class:: Pass() .. class:: Pass()
@ -1015,8 +1003,7 @@ Statements
>>> print(ast.dump(ast.parse('pass'), indent=4)) >>> print(ast.dump(ast.parse('pass'), indent=4))
Module( Module(
body=[ body=[
Pass()], Pass()])
type_ignores=[])
.. class:: TypeAlias(name, type_params, value) .. class:: TypeAlias(name, type_params, value)
@ -1033,9 +1020,7 @@ Statements
body=[ body=[
TypeAlias( TypeAlias(
name=Name(id='Alias', ctx=Store()), name=Name(id='Alias', ctx=Store()),
type_params=[], value=Name(id='int', ctx=Load()))])
value=Name(id='int', ctx=Load()))],
type_ignores=[])
.. versionadded:: 3.12 .. versionadded:: 3.12
@ -1058,8 +1043,7 @@ Imports
names=[ names=[
alias(name='x'), alias(name='x'),
alias(name='y'), alias(name='y'),
alias(name='z')])], alias(name='z')])])
type_ignores=[])
.. class:: ImportFrom(module, names, level) .. class:: ImportFrom(module, names, level)
@ -1080,8 +1064,7 @@ Imports
alias(name='x'), alias(name='x'),
alias(name='y'), alias(name='y'),
alias(name='z')], alias(name='z')],
level=0)], level=0)])
type_ignores=[])
.. class:: alias(name, asname) .. class:: alias(name, asname)
@ -1099,8 +1082,7 @@ Imports
names=[ names=[
alias(name='a', asname='b'), alias(name='a', asname='b'),
alias(name='c')], alias(name='c')],
level=2)], level=2)])
type_ignores=[])
Control flow Control flow
^^^^^^^^^^^^ ^^^^^^^^^^^^
@ -1143,8 +1125,7 @@ Control flow
value=Constant(value=Ellipsis))], value=Constant(value=Ellipsis))],
orelse=[ orelse=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. class:: For(target, iter, body, orelse, type_comment) .. class:: For(target, iter, body, orelse, type_comment)
@ -1178,8 +1159,7 @@ Control flow
value=Constant(value=Ellipsis))], value=Constant(value=Ellipsis))],
orelse=[ orelse=[
Expr( Expr(
value=Constant(value=Ellipsis))])], value=Constant(value=Ellipsis))])])
type_ignores=[])
.. class:: While(test, body, orelse) .. class:: While(test, body, orelse)
@ -1204,8 +1184,7 @@ Control flow
value=Constant(value=Ellipsis))], value=Constant(value=Ellipsis))],
orelse=[ orelse=[
Expr( Expr(
value=Constant(value=Ellipsis))])], value=Constant(value=Ellipsis))])])
type_ignores=[])
.. class:: Break .. class:: Break
@ -1239,9 +1218,7 @@ Control flow
body=[ body=[
Break()], Break()],
orelse=[ orelse=[
Continue()])], Continue()])])])
orelse=[])],
type_ignores=[])
.. class:: Try(body, handlers, orelse, finalbody) .. class:: Try(body, handlers, orelse, finalbody)
@ -1286,8 +1263,7 @@ Control flow
value=Constant(value=Ellipsis))], value=Constant(value=Ellipsis))],
finalbody=[ finalbody=[
Expr( Expr(
value=Constant(value=Ellipsis))])], value=Constant(value=Ellipsis))])])
type_ignores=[])
.. class:: TryStar(body, handlers, orelse, finalbody) .. class:: TryStar(body, handlers, orelse, finalbody)
@ -1315,10 +1291,7 @@ Control flow
type=Name(id='Exception', ctx=Load()), type=Name(id='Exception', ctx=Load()),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])], value=Constant(value=Ellipsis))])])])
orelse=[],
finalbody=[])],
type_ignores=[])
.. versionadded:: 3.11 .. versionadded:: 3.11
@ -1350,10 +1323,7 @@ Control flow
ExceptHandler( ExceptHandler(
type=Name(id='TypeError', ctx=Load()), type=Name(id='TypeError', ctx=Load()),
body=[ body=[
Pass()])], Pass()])])])
orelse=[],
finalbody=[])],
type_ignores=[])
.. class:: With(items, body, type_comment) .. class:: With(items, body, type_comment)
@ -1395,9 +1365,7 @@ Control flow
func=Name(id='something', ctx=Load()), func=Name(id='something', ctx=Load()),
args=[ args=[
Name(id='b', ctx=Load()), Name(id='b', ctx=Load()),
Name(id='d', ctx=Load())], Name(id='d', ctx=Load())]))])])
keywords=[]))])],
type_ignores=[])
Pattern matching Pattern matching
@ -1454,14 +1422,10 @@ Pattern matching
value=Constant(value=Ellipsis))]), value=Constant(value=Ellipsis))]),
match_case( match_case(
pattern=MatchClass( pattern=MatchClass(
cls=Name(id='tuple', ctx=Load()), cls=Name(id='tuple', ctx=Load())),
patterns=[],
kwd_attrs=[],
kwd_patterns=[]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1489,8 +1453,7 @@ Pattern matching
value=Constant(value='Relevant')), value=Constant(value='Relevant')),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1516,8 +1479,7 @@ Pattern matching
pattern=MatchSingleton(value=None), pattern=MatchSingleton(value=None),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1549,8 +1511,7 @@ Pattern matching
value=Constant(value=2))]), value=Constant(value=2))]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1591,8 +1552,7 @@ Pattern matching
MatchStar()]), MatchStar()]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1636,11 +1596,10 @@ Pattern matching
Expr( Expr(
value=Constant(value=Ellipsis))]), value=Constant(value=Ellipsis))]),
match_case( match_case(
pattern=MatchMapping(keys=[], patterns=[], rest='rest'), pattern=MatchMapping(rest='rest'),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1682,16 +1641,13 @@ Pattern matching
MatchValue( MatchValue(
value=Constant(value=0)), value=Constant(value=0)),
MatchValue( MatchValue(
value=Constant(value=0))], value=Constant(value=0))]),
kwd_attrs=[],
kwd_patterns=[]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))]), value=Constant(value=Ellipsis))]),
match_case( match_case(
pattern=MatchClass( pattern=MatchClass(
cls=Name(id='Point3D', ctx=Load()), cls=Name(id='Point3D', ctx=Load()),
patterns=[],
kwd_attrs=[ kwd_attrs=[
'x', 'x',
'y', 'y',
@ -1705,8 +1661,7 @@ Pattern matching
value=Constant(value=0))]), value=Constant(value=0))]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1748,8 +1703,7 @@ Pattern matching
pattern=MatchAs(), pattern=MatchAs(),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1782,8 +1736,7 @@ Pattern matching
MatchAs(name='y')]), MatchAs(name='y')]),
body=[ body=[
Expr( Expr(
value=Constant(value=Ellipsis))])])], value=Constant(value=Ellipsis))])])])
type_ignores=[])
.. versionadded:: 3.10 .. versionadded:: 3.10
@ -1815,8 +1768,7 @@ aliases.
value=Subscript( value=Subscript(
value=Name(id='list', ctx=Load()), value=Name(id='list', ctx=Load()),
slice=Name(id='T', ctx=Load()), slice=Name(id='T', ctx=Load()),
ctx=Load()))], ctx=Load()))])
type_ignores=[])
.. versionadded:: 3.12 .. versionadded:: 3.12
@ -1840,8 +1792,7 @@ aliases.
Name(id='P', ctx=Load()), Name(id='P', ctx=Load()),
Name(id='int', ctx=Load())], Name(id='int', ctx=Load())],
ctx=Load()), ctx=Load()),
ctx=Load()))], ctx=Load()))])
type_ignores=[])
.. versionadded:: 3.12 .. versionadded:: 3.12
@ -1866,8 +1817,7 @@ aliases.
value=Name(id='Ts', ctx=Load()), value=Name(id='Ts', ctx=Load()),
ctx=Load())], ctx=Load())],
ctx=Load()), ctx=Load()),
ctx=Load()))], ctx=Load()))])
type_ignores=[])
.. versionadded:: 3.12 .. versionadded:: 3.12
@ -1907,15 +1857,10 @@ Function and class definitions
Expr( Expr(
value=Lambda( value=Lambda(
args=arguments( args=arguments(
posonlyargs=[],
args=[ args=[
arg(arg='x'), arg(arg='x'),
arg(arg='y')], arg(arg='y')]),
kwonlyargs=[], body=Constant(value=Ellipsis)))])
kw_defaults=[],
defaults=[]),
body=Constant(value=Ellipsis)))],
type_ignores=[])
.. class:: arguments(posonlyargs, args, vararg, kwonlyargs, kw_defaults, kwarg, defaults) .. class:: arguments(posonlyargs, args, vararg, kwonlyargs, kw_defaults, kwarg, defaults)
@ -1954,7 +1899,6 @@ Function and class definitions
FunctionDef( FunctionDef(
name='f', name='f',
args=arguments( args=arguments(
posonlyargs=[],
args=[ args=[
arg( arg(
arg='a', arg='a',
@ -1977,9 +1921,7 @@ Function and class definitions
decorator_list=[ decorator_list=[
Name(id='decorator1', ctx=Load()), Name(id='decorator1', ctx=Load()),
Name(id='decorator2', ctx=Load())], Name(id='decorator2', ctx=Load())],
returns=Constant(value='return annotation'), returns=Constant(value='return annotation'))])
type_params=[])],
type_ignores=[])
.. class:: Return(value) .. class:: Return(value)
@ -1992,8 +1934,7 @@ Function and class definitions
Module( Module(
body=[ body=[
Return( Return(
value=Constant(value=4))], value=Constant(value=4))])
type_ignores=[])
.. class:: Yield(value) .. class:: Yield(value)
@ -2009,16 +1950,14 @@ Function and class definitions
body=[ body=[
Expr( Expr(
value=Yield( value=Yield(
value=Name(id='x', ctx=Load())))], value=Name(id='x', ctx=Load())))])
type_ignores=[])
>>> print(ast.dump(ast.parse('yield from x'), indent=4)) >>> print(ast.dump(ast.parse('yield from x'), indent=4))
Module( Module(
body=[ body=[
Expr( Expr(
value=YieldFrom( value=YieldFrom(
value=Name(id='x', ctx=Load())))], value=Name(id='x', ctx=Load())))])
type_ignores=[])
.. class:: Global(names) .. class:: Global(names)
@ -2035,8 +1974,7 @@ Function and class definitions
names=[ names=[
'x', 'x',
'y', 'y',
'z'])], 'z'])])
type_ignores=[])
>>> print(ast.dump(ast.parse('nonlocal x,y,z'), indent=4)) >>> print(ast.dump(ast.parse('nonlocal x,y,z'), indent=4))
Module( Module(
@ -2045,8 +1983,7 @@ Function and class definitions
names=[ names=[
'x', 'x',
'y', 'y',
'z'])], 'z'])])
type_ignores=[])
.. class:: ClassDef(name, bases, keywords, body, decorator_list, type_params) .. class:: ClassDef(name, bases, keywords, body, decorator_list, type_params)
@ -2086,9 +2023,7 @@ Function and class definitions
Pass()], Pass()],
decorator_list=[ decorator_list=[
Name(id='decorator1', ctx=Load()), Name(id='decorator1', ctx=Load()),
Name(id='decorator2', ctx=Load())], Name(id='decorator2', ctx=Load())])])
type_params=[])],
type_ignores=[])
.. versionchanged:: 3.12 .. versionchanged:: 3.12
Added ``type_params``. Added ``type_params``.
@ -2120,22 +2055,12 @@ Async and await
body=[ body=[
AsyncFunctionDef( AsyncFunctionDef(
name='f', name='f',
args=arguments( args=arguments(),
posonlyargs=[],
args=[],
kwonlyargs=[],
kw_defaults=[],
defaults=[]),
body=[ body=[
Expr( Expr(
value=Await( value=Await(
value=Call( value=Call(
func=Name(id='other_func', ctx=Load()), func=Name(id='other_func', ctx=Load()))))])])
args=[],
keywords=[])))],
decorator_list=[],
type_params=[])],
type_ignores=[])
.. class:: AsyncFor(target, iter, body, orelse, type_comment) .. class:: AsyncFor(target, iter, body, orelse, type_comment)
@ -2180,14 +2105,17 @@ and classes for traversing abstract syntax trees:
modified to correspond to :pep:`484` "signature type comments", modified to correspond to :pep:`484` "signature type comments",
e.g. ``(str, int) -> List[str]``. e.g. ``(str, int) -> List[str]``.
Also, setting ``feature_version`` to a tuple ``(major, minor)`` Setting ``feature_version`` to a tuple ``(major, minor)`` will result in
will attempt to parse using that Python version's grammar. a "best-effort" attempt to parse using that Python version's grammar.
Currently ``major`` must equal to ``3``. For example, setting For example, setting ``feature_version=(3, 9)`` will attempt to disallow
``feature_version=(3, 4)`` will allow the use of ``async`` and parsing of :keyword:`match` statements.
``await`` as variable names. The lowest supported version is Currently ``major`` must equal to ``3``. The lowest supported version is
``(3, 7)``; the highest is ``sys.version_info[0:2]``. ``(3, 7)`` (and this may increase in future Python versions);
the highest is ``sys.version_info[0:2]``. "Best-effort" attempt means there
is no guarantee that the parse (or success of the parse) is the same as
when run on the Python version corresponding to ``feature_version``.
If source contains a null character ('\0'), :exc:`ValueError` is raised. If source contains a null character (``\0``), :exc:`ValueError` is raised.
.. warning:: .. warning::
Note that successfully parsing source code into an AST object doesn't Note that successfully parsing source code into an AST object doesn't
@ -2419,7 +2347,7 @@ and classes for traversing abstract syntax trees:
node = YourTransformer().visit(node) node = YourTransformer().visit(node)
.. function:: dump(node, annotate_fields=True, include_attributes=False, *, indent=None) .. function:: dump(node, annotate_fields=True, include_attributes=False, *, indent=None, show_empty=False)
Return a formatted dump of the tree in *node*. This is mainly useful for Return a formatted dump of the tree in *node*. This is mainly useful for
debugging purposes. If *annotate_fields* is true (by default), debugging purposes. If *annotate_fields* is true (by default),
@ -2436,9 +2364,42 @@ and classes for traversing abstract syntax trees:
indents that many spaces per level. If *indent* is a string (such as ``"\t"``), indents that many spaces per level. If *indent* is a string (such as ``"\t"``),
that string is used to indent each level. that string is used to indent each level.
If *show_empty* is ``False`` (the default), empty lists and fields that are ``None``
will be omitted from the output.
.. versionchanged:: 3.9 .. versionchanged:: 3.9
Added the *indent* option. Added the *indent* option.
.. versionchanged:: 3.13
Added the *show_empty* option.
.. doctest::
>>> print(ast.dump(ast.parse("""\
... async def f():
... await other_func()
... """), indent=4, show_empty=True))
Module(
body=[
AsyncFunctionDef(
name='f',
args=arguments(
posonlyargs=[],
args=[],
kwonlyargs=[],
kw_defaults=[],
defaults=[]),
body=[
Expr(
value=Await(
value=Call(
func=Name(id='other_func', ctx=Load()),
args=[],
keywords=[])))],
decorator_list=[],
type_params=[])],
type_ignores=[])
.. _ast-compiler-flags: .. _ast-compiler-flags:
@ -2530,7 +2491,8 @@ to stdout. Otherwise, the content is read from stdin.
code that generated them. This is helpful for tools that make source code code that generated them. This is helpful for tools that make source code
transformations. transformations.
`leoAst.py <https://leoeditor.com/appendices.html#leoast-py>`_ unifies the `leoAst.py <https://leo-editor.github.io/leo-editor/appendices.html#leoast-py>`_
unifies the
token-based and parse-tree-based views of python programs by inserting token-based and parse-tree-based views of python programs by inserting
two-way links between tokens and ast nodes. two-way links between tokens and ast nodes.
@ -2542,4 +2504,4 @@ to stdout. Otherwise, the content is read from stdin.
`Parso <https://parso.readthedocs.io>`_ is a Python parser that supports `Parso <https://parso.readthedocs.io>`_ is a Python parser that supports
error recovery and round-trip parsing for different Python versions (in error recovery and round-trip parsing for different Python versions (in
multiple Python versions). Parso is also able to list multiple syntax errors multiple Python versions). Parso is also able to list multiple syntax errors
in your python file. in your Python file.

View file

@ -605,6 +605,9 @@ Opening network connections
The *family*, *proto*, *flags*, *reuse_address*, *reuse_port*, The *family*, *proto*, *flags*, *reuse_address*, *reuse_port*,
*allow_broadcast*, and *sock* parameters were added. *allow_broadcast*, and *sock* parameters were added.
.. versionchanged:: 3.8
Added support for Windows.
.. versionchanged:: 3.8.1 .. versionchanged:: 3.8.1
The *reuse_address* parameter is no longer supported, as using The *reuse_address* parameter is no longer supported, as using
:ref:`socket.SO_REUSEADDR <socket-unix-constants>` :ref:`socket.SO_REUSEADDR <socket-unix-constants>`
@ -622,11 +625,8 @@ Opening network connections
prevents processes with differing UIDs from assigning sockets to the same prevents processes with differing UIDs from assigning sockets to the same
socket address. socket address.
.. versionchanged:: 3.8
Added support for Windows.
.. versionchanged:: 3.11 .. versionchanged:: 3.11
The *reuse_address* parameter, disabled since Python 3.9.0, 3.8.1, The *reuse_address* parameter, disabled since Python 3.8.1,
3.7.6 and 3.6.10, has been entirely removed. 3.7.6 and 3.6.10, has been entirely removed.
.. coroutinemethod:: loop.create_unix_connection(protocol_factory, \ .. coroutinemethod:: loop.create_unix_connection(protocol_factory, \
@ -1641,6 +1641,31 @@ Do not instantiate the :class:`Server` class directly.
coroutine to wait until the server is closed (and no more coroutine to wait until the server is closed (and no more
connections are active). connections are active).
.. method:: close_clients()
Close all existing incoming client connections.
Calls :meth:`~asyncio.BaseTransport.close` on all associated
transports.
:meth:`close` should be called before :meth:`close_clients` when
closing the server to avoid races with new clients connecting.
.. versionadded:: 3.13
.. method:: abort_clients()
Close all existing incoming client connections immediately,
without waiting for pending operations to complete.
Calls :meth:`~asyncio.WriteTransport.abort` on all associated
transports.
:meth:`close` should be called before :meth:`abort_clients` when
closing the server to avoid races with new clients connecting.
.. versionadded:: 3.13
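A rough sketch of the ordering described above (assuming ``server`` is an
existing :class:`Server`)::

    import asyncio

    async def shut_down(server: asyncio.Server) -> None:
        server.close()              # stop accepting new connections first
        server.close_clients()      # then close existing client transports
        await server.wait_closed()  # wait until the server is fully closed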
.. method:: get_loop() .. method:: get_loop()
Return the event loop associated with the server object. Return the event loop associated with the server object.

View file

@ -237,7 +237,7 @@ implementation used by the asyncio event loop:
It works reliably even when the asyncio event loop is run in a non-main OS thread. It works reliably even when the asyncio event loop is run in a non-main OS thread.
There is no noticeable overhead when handling a big number of children (*O(1)* each There is no noticeable overhead when handling a big number of children (*O*\ (1) each
time a child terminates), but starting a thread per process requires extra memory. time a child terminates), but starting a thread per process requires extra memory.
This watcher is used by default. This watcher is used by default.
@ -257,7 +257,7 @@ implementation used by the asyncio event loop:
watcher is installed. watcher is installed.
The solution is safe but it has a significant overhead when The solution is safe but it has a significant overhead when
handling a big number of processes (*O(n)* each time a handling a big number of processes (*O*\ (*n*) each time a
:py:data:`SIGCHLD` is received). :py:data:`SIGCHLD` is received).
.. versionadded:: 3.8 .. versionadded:: 3.8
@ -273,7 +273,7 @@ implementation used by the asyncio event loop:
The watcher avoids disrupting other code spawning processes The watcher avoids disrupting other code spawning processes
by polling every process explicitly on a :py:data:`SIGCHLD` signal. by polling every process explicitly on a :py:data:`SIGCHLD` signal.
This solution is as safe as :class:`MultiLoopChildWatcher` and has the same *O(N)* This solution is as safe as :class:`MultiLoopChildWatcher` and has the same *O*\ (*n*)
complexity but requires a running event loop in the main thread to work. complexity but requires a running event loop in the main thread to work.
.. deprecated:: 3.12 .. deprecated:: 3.12
@ -285,7 +285,7 @@ implementation used by the asyncio event loop:
processes and waiting for their termination. processes and waiting for their termination.
There is no noticeable overhead when handling a big number of There is no noticeable overhead when handling a big number of
children (*O(1)* each time a child terminates). children (*O*\ (1) each time a child terminates).
This solution requires a running event loop in the main thread to work, as This solution requires a running event loop in the main thread to work, as
:class:`SafeChildWatcher`. :class:`SafeChildWatcher`.

View file

@ -362,6 +362,11 @@ Datagram Transports
This method does not block; it buffers the data and arranges This method does not block; it buffers the data and arranges
for it to be sent out asynchronously. for it to be sent out asynchronously.
.. versionchanged:: 3.13
This method can be called with an empty bytes object to send a
zero-length datagram. The buffer size calculation used for flow
control is also updated to account for the datagram header.
.. method:: DatagramTransport.abort()
Close the transport immediately, without waiting for pending
@@ -417,8 +422,8 @@ Subprocess Transports
Stop the subprocess.
-On POSIX systems, this method sends SIGTERM to the subprocess.
-On Windows, the Windows API function TerminateProcess() is called to
+On POSIX systems, this method sends :py:const:`~signal.SIGTERM` to the subprocess.
+On Windows, the Windows API function :c:func:`!TerminateProcess` is called to
stop the subprocess.
See also :meth:`subprocess.Popen.terminate`.


@@ -62,6 +62,9 @@ Queue
Remove and return an item from the queue. If queue is empty,
wait until an item is available.
+Raises :exc:`QueueShutDown` if the queue has been shut down and
+is empty, or if the queue has been shut down immediately.
.. method:: get_nowait()
Return an item if one is immediately available, else raise
@@ -82,6 +85,8 @@ Queue
Put an item into the queue. If the queue is full, wait until a
free slot is available before adding the item.
+Raises :exc:`QueueShutDown` if the queue has been shut down.
.. method:: put_nowait(item)
Put an item into the queue without blocking.
@@ -92,6 +97,22 @@ Queue
Return the number of items in the queue.
+.. method:: shutdown(immediate=False)
+   Shut down the queue, making :meth:`~Queue.get` and :meth:`~Queue.put`
+   raise :exc:`QueueShutDown`.
+   By default, :meth:`~Queue.get` on a shut down queue will only
+   raise once the queue is empty. Set *immediate* to true to make
+   :meth:`~Queue.get` raise immediately instead.
+   All blocked callers of :meth:`~Queue.put` and :meth:`~Queue.get`
+   will be unblocked. If *immediate* is true, a task will be marked
+   as done for each remaining item in the queue, which may unblock
+   callers of :meth:`~Queue.join`.
+   .. versionadded:: 3.13
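A rough sketch of how the new shutdown API is meant to be used (requires Python 3.13; the producer/consumer names are illustrative)::

    import asyncio

    async def consumer(queue):
        while True:
            try:
                item = await queue.get()
            except asyncio.QueueShutDown:
                break          # queue has been shut down and drained
            print("processed", item)
            queue.task_done()

    async def main():
        queue = asyncio.Queue()
        worker = asyncio.create_task(consumer(queue))
        for i in range(3):
            await queue.put(i)
        await queue.join()     # wait until every item is marked done
        queue.shutdown()       # subsequent get()/put() raise QueueShutDown
        await worker

    asyncio.run(main())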
.. method:: task_done()
Indicate that a formerly enqueued task is complete.
@@ -105,6 +126,9 @@ Queue
call was received for every item that had been :meth:`~Queue.put`
into the queue).
+``shutdown(immediate=True)`` calls :meth:`task_done` for each
+remaining item in the queue.
Raises :exc:`ValueError` if called more times than there were
items placed in the queue.
@@ -145,6 +169,14 @@ Exceptions
on a queue that has reached its *maxsize*.
+.. exception:: QueueShutDown
+   Exception raised when :meth:`~Queue.put` or :meth:`~Queue.get` is
+   called on a queue which has been shut down.
+   .. versionadded:: 3.13
Examples
========


@@ -77,8 +77,8 @@ and work with streams:
.. versionchanged:: 3.7
Added the *ssl_handshake_timeout* parameter.
-.. versionadded:: 3.8
-   Added *happy_eyeballs_delay* and *interleave* parameters.
+.. versionchanged:: 3.8
+   Added the *happy_eyeballs_delay* and *interleave* parameters.
.. versionchanged:: 3.10
Removed the *loop* parameter.
@@ -260,8 +260,19 @@ StreamReader
buffer is reset. The :attr:`IncompleteReadError.partial` attribute
may contain a portion of the separator.
+The *separator* may also be a tuple of separators. In this
+case the return value will be the shortest possible that has any
+separator as the suffix. For the purposes of :exc:`LimitOverrunError`,
+the shortest possible separator is considered to be the one that
+matched.
.. versionadded:: 3.5.2
+.. versionchanged:: 3.13
+   The *separator* parameter may now be a :class:`tuple` of
+   separators.
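A small sketch of reading with several candidate separators (requires Python 3.13; host and port are placeholders)::

    import asyncio

    async def read_first_line(host, port):
        reader, writer = await asyncio.open_connection(host, port)
        # Stops at the first CRLF or bare LF, whichever arrives first.
        line = await reader.readuntil((b"\r\n", b"\n"))
        writer.close()
        await writer.wait_closed()
        return line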
.. method:: at_eof()
Return ``True`` if the buffer is empty and :meth:`feed_eof`
@@ -347,7 +358,7 @@ StreamWriter
be resumed. When there is nothing to wait for, the :meth:`drain`
returns immediately.
-.. coroutinemethod:: start_tls(sslcontext, \*, server_hostname=None, \
+.. coroutinemethod:: start_tls(sslcontext, *, server_hostname=None, \
ssl_handshake_timeout=None, ssl_shutdown_timeout=None)
Upgrade an existing stream-based connection to TLS.


@@ -240,7 +240,7 @@ their completion.
.. note::
-   On Windows, :py:data:`SIGTERM` is an alias for :meth:`terminate`.
+   On Windows, :py:const:`~signal.SIGTERM` is an alias for :meth:`terminate`.
``CTRL_C_EVENT`` and ``CTRL_BREAK_EVENT`` can be sent to processes
started with a *creationflags* parameter which includes
``CREATE_NEW_PROCESS_GROUP``.
@@ -249,10 +249,10 @@ their completion.
Stop the child process.
-On POSIX systems this method sends :py:const:`signal.SIGTERM` to the
+On POSIX systems this method sends :py:const:`~signal.SIGTERM` to the
child process.
-On Windows the Win32 API function :c:func:`TerminateProcess` is
+On Windows the Win32 API function :c:func:`!TerminateProcess` is
called to stop the child process.
.. method:: kill()


@@ -216,8 +216,8 @@ Condition
.. method:: notify(n=1)
-   Wake up at most *n* tasks (1 by default) waiting on this
-   condition. The method is no-op if no tasks are waiting.
+   Wake up *n* tasks (1 by default) waiting on this
+   condition. If fewer than *n* tasks are waiting they are all awakened.
The lock must be acquired before this method is called and
released shortly after. If called with an *unlocked* lock
@@ -257,12 +257,18 @@ Condition
Once awakened, the Condition re-acquires its lock and this method
returns ``True``.
+Note that a task *may* return from this call spuriously,
+which is why the caller should always re-check the state
+and be prepared to :meth:`wait` again. For this reason, you may
+prefer to use :meth:`wait_for` instead.
.. coroutinemethod:: wait_for(predicate)
Wait until a predicate becomes *true*.
The predicate must be a callable which result will be
-interpreted as a boolean value. The final value is the
+interpreted as a boolean value. The method will repeatedly
+:meth:`wait` until the predicate evaluates to *true*. The final value is the
return value.
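A minimal sketch of waiting on a predicate (names are illustrative)::

    import asyncio

    async def main():
        cond = asyncio.Condition()
        items = []

        async def producer():
            async with cond:
                items.append("ready")
                cond.notify_all()

        async def consumer():
            async with cond:
                # wait_for() re-checks the predicate after every wakeup,
                # so spurious wakeups are handled for the caller.
                await cond.wait_for(lambda: items)
                print("got", items.pop())

        await asyncio.gather(consumer(), producer())

    asyncio.run(main())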


@@ -334,6 +334,13 @@ and reliable way to wait for all tasks in the group to finish.
Create a task in this task group.
The signature matches that of :func:`asyncio.create_task`.
+If the task group is inactive (e.g. not yet entered,
+already finished, or in the process of shutting down),
+we will close the given ``coro``.
+.. versionchanged:: 3.13
+   Close the given coroutine if the task group is not active.
Example::
@@ -385,6 +392,27 @@ is also included in the exception group.
The same special case is made for
:exc:`KeyboardInterrupt` and :exc:`SystemExit` as in the previous paragraph.
+Task groups are careful not to mix up the internal cancellation used to
+"wake up" their :meth:`~object.__aexit__` with cancellation requests
+for the task in which they are running made by other parties.
+In particular, when one task group is syntactically nested in another,
+and both experience an exception in one of their child tasks simultaneously,
+the inner task group will process its exceptions, and then the outer task group
+will receive another cancellation and process its own exceptions.
+In the case where a task group is cancelled externally and also must
+raise an :exc:`ExceptionGroup`, it will call the parent task's
+:meth:`~asyncio.Task.cancel` method. This ensures that a
+:exc:`asyncio.CancelledError` will be raised at the next
+:keyword:`await`, so the cancellation is not lost.
+Task groups preserve the cancellation count
+reported by :meth:`asyncio.Task.cancelling`.
+.. versionchanged:: 3.13
+   Improved handling of simultaneous internal and external cancellations
+   and correct preservation of cancellation counts.
Sleeping
========
@@ -828,23 +856,22 @@ Waiting Primitives
*return_when* indicates when this function should return. It must
be one of the following constants:
-.. tabularcolumns:: |l|L|
-+-----------------------------+----------------------------------------+
-| Constant | Description |
-+=============================+========================================+
-| :const:`FIRST_COMPLETED` | The function will return when any |
-| | future finishes or is cancelled. |
-+-----------------------------+----------------------------------------+
-| :const:`FIRST_EXCEPTION` | The function will return when any |
-| | future finishes by raising an |
-| | exception. If no future raises an |
-| | exception then it is equivalent to |
-| | :const:`ALL_COMPLETED`. |
-+-----------------------------+----------------------------------------+
-| :const:`ALL_COMPLETED` | The function will return when all |
-| | futures finish or are cancelled. |
-+-----------------------------+----------------------------------------+
+.. list-table::
+   :header-rows: 1
+   * - Constant
+     - Description
+   * - .. data:: FIRST_COMPLETED
+     - The function will return when any future finishes or is cancelled.
+   * - .. data:: FIRST_EXCEPTION
+     - The function will return when any future finishes by raising an
+       exception. If no future raises an exception
+       then it is equivalent to :const:`ALL_COMPLETED`.
+   * - .. data:: ALL_COMPLETED
+     - The function will return when all futures finish or are cancelled.
Unlike :func:`~asyncio.wait_for`, ``wait()`` does not cancel the
futures when a timeout occurs.
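A short sketch of *return_when* in practice (the coroutine and delays are illustrative)::

    import asyncio

    async def fetch(delay):
        await asyncio.sleep(delay)
        return delay

    async def main():
        tasks = {asyncio.create_task(fetch(d)) for d in (0.1, 0.5)}
        done, pending = await asyncio.wait(
            tasks, return_when=asyncio.FIRST_COMPLETED)
        print("first finished:", done.pop().result())
        for task in pending:        # wait() leaves the rest running
            task.cancel()

    asyncio.run(main())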
@@ -861,19 +888,50 @@ Waiting Primitives
.. function:: as_completed(aws, *, timeout=None)
-Run :ref:`awaitable objects <asyncio-awaitables>` in the *aws*
-iterable concurrently. Return an iterator of coroutines.
-Each coroutine returned can be awaited to get the earliest next
-result from the iterable of the remaining awaitables.
-Raises :exc:`TimeoutError` if the timeout occurs before
-all Futures are done.
-Example::
-    for coro in as_completed(aws):
-        earliest_result = await coro
-        # ...
+Run :ref:`awaitable objects <asyncio-awaitables>` in the *aws* iterable
+concurrently. The returned object can be iterated to obtain the results
+of the awaitables as they finish.
+The object returned by ``as_completed()`` can be iterated as an
+:term:`asynchronous iterator` or a plain :term:`iterator`. When asynchronous
+iteration is used, the originally-supplied awaitables are yielded if they
+are tasks or futures. This makes it easy to correlate previously-scheduled
+tasks with their results. Example::
+    ipv4_connect = create_task(open_connection("127.0.0.1", 80))
+    ipv6_connect = create_task(open_connection("::1", 80))
+    tasks = [ipv4_connect, ipv6_connect]
+    async for earliest_connect in as_completed(tasks):
+        # earliest_connect is done. The result can be obtained by
+        # awaiting it or calling earliest_connect.result()
+        reader, writer = await earliest_connect
+        if earliest_connect is ipv6_connect:
+            print("IPv6 connection established.")
+        else:
+            print("IPv4 connection established.")
+During asynchronous iteration, implicitly-created tasks will be yielded for
+supplied awaitables that aren't tasks or futures.
+When used as a plain iterator, each iteration yields a new coroutine that
+returns the result or raises the exception of the next completed awaitable.
+This pattern is compatible with Python versions older than 3.13::
+    ipv4_connect = create_task(open_connection("127.0.0.1", 80))
+    ipv6_connect = create_task(open_connection("::1", 80))
+    tasks = [ipv4_connect, ipv6_connect]
+    for next_connect in as_completed(tasks):
+        # next_connect is not one of the original task objects. It must be
+        # awaited to obtain the result value or raise the exception of the
+        # awaitable that finishes next.
+        reader, writer = await next_connect
+A :exc:`TimeoutError` is raised if the timeout occurs before all awaitables
+are done. This is raised by the ``async for`` loop during asynchronous
+iteration or by the coroutines yielded during plain iteration.
.. versionchanged:: 3.10
Removed the *loop* parameter.
@@ -885,6 +943,10 @@ Waiting Primitives
.. versionchanged:: 3.12
Added support for generators yielding tasks.
+.. versionchanged:: 3.13
+   The result can now be used as either an :term:`asynchronous iterator`
+   or as a plain :term:`iterator` (previously it was only a plain iterator).
Running in Threads
==================
@@ -1328,6 +1390,15 @@ Task Object
catching :exc:`CancelledError`, it needs to call this method to remove
the cancellation state.
+When this method decrements the cancellation count to zero,
+the method checks if a previous :meth:`cancel` call had arranged
+for :exc:`CancelledError` to be thrown into the task.
+If it hasn't been thrown yet, that arrangement will be
+rescinded (by resetting the internal ``_must_cancel`` flag).
+.. versionchanged:: 3.13
+   Changed to rescind pending cancellation requests upon reaching zero.
.. method:: cancelling()
Return the number of pending cancellation requests to this Task, i.e.,


@@ -4,8 +4,8 @@
.. module:: atexit
:synopsis: Register and execute cleanup functions.
-.. moduleauthor:: Skip Montanaro <skip@pobox.com>
-.. sectionauthor:: Skip Montanaro <skip@pobox.com>
+.. moduleauthor:: Skip Montanaro <skip.montanaro@gmail.com>
+.. sectionauthor:: Skip Montanaro <skip.montanaro@gmail.com>
--------------


@@ -7,7 +7,7 @@ Audit events table
This table contains all events raised by :func:`sys.audit` or
:c:func:`PySys_Audit` calls throughout the CPython runtime and the
-standard library. These calls were added in 3.8.0 or later (see :pep:`578`).
+standard library. These calls were added in 3.8 or later (see :pep:`578`).
See :func:`sys.addaudithook` and :c:func:`PySys_AddAuditHook` for
information on handling these events.
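A minimal sketch of handling these events from Python (the event filter is illustrative)::

    import os
    import sys

    def audit_hook(event, args):
        if event == "open":
            print("open event:", args)

    sys.addaudithook(audit_hook)
    open(os.devnull).close()          # triggers the "open" audit event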


@@ -244,6 +244,24 @@ The modern interface provides:
.. versionadded:: 3.4
+.. function:: z85encode(s)
+   Encode the :term:`bytes-like object` *s* using Z85 (as used in ZeroMQ)
+   and return the encoded :class:`bytes`. See `Z85 specification
+   <https://rfc.zeromq.org/spec/32/>`_ for more information.
+   .. versionadded:: 3.13
+.. function:: z85decode(s)
+   Decode the Z85-encoded :term:`bytes-like object` or ASCII string *s* and
+   return the decoded :class:`bytes`. See `Z85 specification
+   <https://rfc.zeromq.org/spec/32/>`_ for more information.
+   .. versionadded:: 3.13
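A quick sketch of the new helpers round-tripping data (requires Python 3.13)::

    import base64

    data = b"hello wo"                 # Z85 works in 4-byte groups
    encoded = base64.z85encode(data)
    assert base64.z85decode(encoded) == data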
The legacy interface:
.. function:: decode(input, output)


@@ -132,8 +132,8 @@ The :mod:`bdb` module also defines two classes:
frame is considered to originate in a certain module is determined
by the ``__name__`` in the frame globals.
-.. versionadded:: 3.1
-   The *skip* argument.
+.. versionchanged:: 3.1
+   Added the *skip* parameter.
The following methods of :class:`Bdb` normally don't need to be overridden.
@@ -148,8 +148,8 @@ The :mod:`bdb` module also defines two classes:
.. method:: reset()
-   Set the :attr:`botframe`, :attr:`stopframe`, :attr:`returnframe` and
-   :attr:`quitting` attributes with values ready to start debugging.
+   Set the :attr:`!botframe`, :attr:`!stopframe`, :attr:`!returnframe` and
+   :attr:`quitting <Bdb.set_quit>` attributes with values ready to start debugging.
.. method:: trace_dispatch(frame, event, arg)
@@ -182,7 +182,7 @@ The :mod:`bdb` module also defines two classes:
If the debugger should stop on the current line, invoke the
:meth:`user_line` method (which should be overridden in subclasses).
-Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set
+Raise a :exc:`BdbQuit` exception if the :attr:`quitting <Bdb.set_quit>` flag is set
(which can be set from :meth:`user_line`). Return a reference to the
:meth:`trace_dispatch` method for further tracing in that scope.
@@ -190,7 +190,7 @@ The :mod:`bdb` module also defines two classes:
If the debugger should stop on this function call, invoke the
:meth:`user_call` method (which should be overridden in subclasses).
-Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set
+Raise a :exc:`BdbQuit` exception if the :attr:`quitting <Bdb.set_quit>` flag is set
(which can be set from :meth:`user_call`). Return a reference to the
:meth:`trace_dispatch` method for further tracing in that scope.
@@ -198,7 +198,7 @@ The :mod:`bdb` module also defines two classes:
If the debugger should stop on this function return, invoke the
:meth:`user_return` method (which should be overridden in subclasses).
-Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set
+Raise a :exc:`BdbQuit` exception if the :attr:`quitting <Bdb.set_quit>` flag is set
(which can be set from :meth:`user_return`). Return a reference to the
:meth:`trace_dispatch` method for further tracing in that scope.
@@ -206,7 +206,7 @@ The :mod:`bdb` module also defines two classes:
If the debugger should stop at this exception, invokes the
:meth:`user_exception` method (which should be overridden in subclasses).
-Raise a :exc:`BdbQuit` exception if the :attr:`Bdb.quitting` flag is set
+Raise a :exc:`BdbQuit` exception if the :attr:`quitting <Bdb.set_quit>` flag is set
(which can be set from :meth:`user_exception`). Return a reference to the
:meth:`trace_dispatch` method for further tracing in that scope.
@@ -293,7 +293,9 @@ The :mod:`bdb` module also defines two classes:
.. method:: set_quit()
-   Set the :attr:`quitting` attribute to ``True``. This raises :exc:`BdbQuit` in
+   .. index:: single: quitting (bdb.Bdb attribute)
+   Set the :attr:`!quitting` attribute to ``True``. This raises :exc:`BdbQuit` in
the next call to one of the :meth:`!dispatch_\*` methods.
@@ -383,7 +385,7 @@ The :mod:`bdb` module also defines two classes:
.. method:: run(cmd, globals=None, locals=None)
Debug a statement executed via the :func:`exec` function. *globals*
-defaults to :attr:`__main__.__dict__`, *locals* defaults to *globals*.
+defaults to :attr:`!__main__.__dict__`, *locals* defaults to *globals*.
.. method:: runeval(expr, globals=None, locals=None)
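A minimal sketch of the dispatch behaviour described above (the traced function is illustrative)::

    import bdb

    class LineTracer(bdb.Bdb):
        def user_line(self, frame):
            print("stopped at line", frame.f_lineno, "in", frame.f_code.co_name)
            if frame.f_code.co_name == "target":
                self.set_quit()   # next dispatch_* call raises BdbQuit

    def target():
        x = 1
        return x + 1

    LineTracer().runcall(target)   # runcall() catches the BdbQuit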


@@ -79,7 +79,7 @@ The following functions are provided:
To support inserting records in a table, the *key* function (if any) is
applied to *x* for the search step but not for the insertion step.
-Keep in mind that the ``O(log n)`` search is dominated by the slow O(n)
+Keep in mind that the *O*\ (log *n*) search is dominated by the slow *O*\ (*n*)
insertion step.
.. versionchanged:: 3.10
@@ -99,7 +99,7 @@ The following functions are provided:
To support inserting records in a table, the *key* function (if any) is
applied to *x* for the search step but not for the insertion step.
-Keep in mind that the ``O(log n)`` search is dominated by the slow O(n)
+Keep in mind that the *O*\ (log *n*) search is dominated by the slow *O*\ (*n*)
insertion step.
.. versionchanged:: 3.10
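A small sketch of the cost trade-off noted above (the record layout is illustrative)::

    import bisect

    # Records kept sorted by their numeric second field.
    records = [("apple", 1), ("banana", 3), ("cherry", 7)]
    bisect.insort(records, ("date", 5), key=lambda r: r[1])
    # The O(log n) search finds the slot quickly, but the O(n) insertion
    # still shifts every trailing record.
    print(records)   # [('apple', 1), ('banana', 3), ('date', 5), ('cherry', 7)]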
@@ -115,7 +115,7 @@ thoughts in mind:
* Bisection is effective for searching ranges of values.
  For locating specific values, dictionaries are more performant.
-* The *insort()* functions are ``O(n)`` because the logarithmic search step
+* The *insort()* functions are *O*\ (*n*) because the logarithmic search step
  is dominated by the linear time insertion step.
* The search functions are stateless and discard key function results after


@@ -91,7 +91,7 @@ The :mod:`bz2` module contains:
and :meth:`~io.IOBase.truncate`.
Iteration and the :keyword:`with` statement are supported.
-:class:`BZ2File` also provides the following methods:
+:class:`BZ2File` also provides the following methods and attributes:
.. method:: peek([n])
@@ -148,6 +148,19 @@ The :mod:`bz2` module contains:
.. versionadded:: 3.3
+.. attribute:: mode
+   ``'rb'`` for reading and ``'wb'`` for writing.
+   .. versionadded:: 3.13
+.. attribute:: name
+   The bzip2 file name. Equivalent to the :attr:`~io.FileIO.name`
+   attribute of the underlying :term:`file object`.
+   .. versionadded:: 3.13
Support for the :keyword:`with` statement was added. Support for the :keyword:`with` statement was added.
@ -156,7 +169,6 @@ The :mod:`bz2` module contains:
Support was added for *filename* being a :term:`file object` instead of an Support was added for *filename* being a :term:`file object` instead of an
actual filename. actual filename.
.. versionchanged:: 3.3
The ``'a'`` (append) mode was added, along with support for reading The ``'a'`` (append) mode was added, along with support for reading
multi-stream files. multi-stream files.


@@ -512,7 +512,7 @@ to interactively print a calendar.
python -m calendar [-h] [-L LOCALE] [-e ENCODING] [-t {text,html}]
                   [-w WIDTH] [-l LINES] [-s SPACING] [-m MONTHS] [-c CSS]
-                  [year] [month]
+                  [-f FIRST_WEEKDAY] [year] [month]
For example, to print a calendar for the year 2000:
@@ -586,10 +586,17 @@ The following options are accepted:
or as an HTML document.
+.. option:: --first-weekday FIRST_WEEKDAY, -f FIRST_WEEKDAY
+   The weekday to start each week.
+   Must be a number between 0 (Monday) and 6 (Sunday).
+   Defaults to 0.
+   .. versionadded:: 3.13
.. option:: year
The year to print the calendar for.
+Must be a number between 1 and 9999.
Defaults to the current year.
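The new CLI switch mirrors the *firstweekday* parameter of the calendar classes; a rough programmatic sketch of what ``python -m calendar -f 6 2000`` produces in text mode::

    import calendar

    # 6 == Sunday, matching the option's 0 (Monday) .. 6 (Sunday) range.
    cal = calendar.TextCalendar(firstweekday=6)
    print(cal.formatyear(2000))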

Some files were not shown because too many files have changed in this diff.