Mirror of https://github.com/python/cpython.git (synced 2025-12-08 06:10:17 +00:00)

Commit b153f22afe: Merge remote-tracking branch 'origin/main' into windows-socket-sendfile
3241 changed files with 530,815 additions and 211,222 deletions
@@ -1,11 +1,11 @@
trigger: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']
trigger: ['main', '3.*']

jobs:
- job: Prebuild
  displayName: Pre-build checks

  pool:
    vmImage: ubuntu-22.04
    vmImage: ubuntu-24.04

  steps:
  - template: ./prebuild-checks.yml

@@ -1,27 +0,0 @@
#!/bin/sh
apt-get update

apt-get -yq install \
    build-essential \
    zlib1g-dev \
    libbz2-dev \
    liblzma-dev \
    libncurses5-dev \
    libreadline6-dev \
    libsqlite3-dev \
    libssl-dev \
    libgdbm-dev \
    tk-dev \
    lzma \
    lzma-dev \
    liblzma-dev \
    libffi-dev \
    uuid-dev \
    xvfb

if [ ! -z "$1" ]
then
    echo ##vso[task.prependpath]$PWD/multissl/openssl/$1
    echo ##vso[task.setvariable variable=OPENSSL_DIR]$PWD/multissl/openssl/$1
    python3 Tools/ssl/multissltests.py --steps=library --base-directory $PWD/multissl --openssl $1 --system Linux
fi

@@ -1,26 +0,0 @@
steps:
- checkout: self
  clean: true
  fetchDepth: 5

# Work around a known issue affecting Ubuntu VMs on Pipelines
- script: sudo setfacl -Rb /home/vsts
  displayName: 'Workaround ACL issue'

- script: sudo ./.azure-pipelines/posix-deps-apt.sh $(openssl_version)
  displayName: 'Install dependencies'

- script: ./configure --with-pydebug
  displayName: 'Configure CPython (debug)'

- script: make -j4
  displayName: 'Build CPython'

- script: make pythoninfo
  displayName: 'Display build info'

- script: |
    git fetch origin
    ./python Tools/patchcheck/patchcheck.py --ci true
  displayName: 'Run patchcheck.py'
  condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))

@@ -1,28 +0,0 @@
pr: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']

jobs:
- job: Prebuild
  displayName: Pre-build checks

  pool:
    vmImage: ubuntu-22.04

  steps:
  - template: ./prebuild-checks.yml


- job: Ubuntu_Patchcheck
  displayName: Ubuntu patchcheck
  dependsOn: Prebuild
  condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))

  pool:
    vmImage: ubuntu-22.04

  variables:
    testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
    testRunPlatform: linux
    openssl_version: 1.1.1u

  steps:
  - template: ./posix-steps.yml

@@ -1,24 +0,0 @@
FROM docker.io/library/fedora:40

ENV CC=clang

ENV WASI_SDK_VERSION=21
ENV WASI_SDK_PATH=/opt/wasi-sdk

ENV WASMTIME_HOME=/opt/wasmtime
ENV WASMTIME_VERSION=18.0.3
ENV WASMTIME_CPU_ARCH=x86_64

RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \
    dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \
    dnf -y clean all

RUN mkdir ${WASI_SDK_PATH} && \
    curl --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-linux.tar.gz | \
    tar --strip-components 1 --directory ${WASI_SDK_PATH} --extract --gunzip

RUN mkdir --parents ${WASMTIME_HOME} && \
    curl --location "https://github.com/bytecodealliance/wasmtime/releases/download/v${WASMTIME_VERSION}/wasmtime-v${WASMTIME_VERSION}-${WASMTIME_CPU_ARCH}-linux.tar.xz" | \
    xz --decompress | \
    tar --strip-components 1 --directory ${WASMTIME_HOME} -x && \
    ln -s ${WASMTIME_HOME}/wasmtime /usr/local/bin

@@ -1,15 +1,10 @@
{
  "build": {
    "dockerfile": "Dockerfile"
  },
  "image": "ghcr.io/python/devcontainer:latest",
  "onCreateCommand": [
    // Install common tooling.
    "dnf",
    "install",
    "-y",
    "which",
    "zsh",
    "fish",
    // For umask fix below.
    "/usr/bin/setfacl"
  ],

@@ -1,15 +1,15 @@
root = true

[*.{py,c,cpp,h,js,rst,md,yml}]
[*.{py,c,cpp,h,js,rst,md,yml,yaml,gram}]
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space

[*.{py,c,cpp,h}]
[*.{py,c,cpp,h,gram}]
indent_size = 4

[*.rst]
indent_size = 3

[*.{js,yml}]
[*.{js,yml,yaml}]
indent_size = 2

.gitattributes (vendored): 5 changes

@@ -10,6 +10,7 @@
*.ico binary
*.jpg binary
*.pck binary
*.pdf binary
*.png binary
*.psd binary
*.tar binary

@@ -27,8 +28,6 @@ Lib/test/cjkencodings/* noeol
Lib/test/tokenizedata/coding20731.py noeol
Lib/test/decimaltestdata/*.decTest noeol
Lib/test/test_email/data/*.txt noeol
Lib/test/test_importlib/resources/data01/* noeol
Lib/test/test_importlib/resources/namespacedata01/* noeol
Lib/test/xmltestdata/* noeol

# Shell scripts should have LF even on Windows because of Cygwin

@@ -69,6 +68,7 @@ PCbuild/readme.txt dos
**/clinic/*.cpp.h generated
**/clinic/*.h.h generated
*_db.h generated
Doc/c-api/lifecycle.dot.svg generated
Doc/data/stable_abi.dat generated
Doc/library/token-list.inc generated
Include/internal/pycore_ast.h generated

@@ -103,3 +103,4 @@ Python/stdlib_module_names.h generated
Tools/peg_generator/pegen/grammar_parser.py generated
aclocal.m4 generated
configure generated
*.min.js generated

.github/CODEOWNERS (vendored): 760 changes

@@ -1,111 +1,258 @@
|
|||
# See https://help.github.com/articles/about-codeowners/
|
||||
# for more info about CODEOWNERS file
|
||||
# for further details about the .github/CODEOWNERS file.
|
||||
|
||||
# It uses the same pattern rule for gitignore file
|
||||
# https://git-scm.com/docs/gitignore#_pattern_format
|
||||
# Notably, a later match overrides earlier matches, so order matters.
|
||||
# If using a wildcard pattern, try to be as specific as possible to avoid
|
||||
# matching unintended files or overriding previous entries.
|
||||
# To exclude a file from ownership, add a line with only the file.
|
||||
# See the exclusions section at the end of the file for examples.
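As a purely hypothetical illustration of the pattern rules described above (these paths and teams are invented and are not part of this commit), a later and more specific entry overrides an earlier one, and a bare path with no owner removes ownership again:

    Lib/example/            @team-a    # owns everything under Lib/example/
    Lib/example/special.py  @team-b    # later match wins for this one file
    Lib/example/legacy.py              # no owner listed: reviews are not auto-requested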
|
||||
|
||||
# GitHub
|
||||
.github/** @ezio-melotti @hugovk
|
||||
# =======
|
||||
# Purpose
|
||||
# =======
|
||||
#
|
||||
# An entry in this file does not imply 'ownership', despite the name of the
|
||||
# file, but instead that those listed take an interest in that part of the
|
||||
# project and will automatically be added as reviewers to PRs that affect
|
||||
# the matching files.
|
||||
# See also the Experts Index in the Python Developer's Guide:
|
||||
# https://devguide.python.org/core-developers/experts/
|
||||
#
|
||||
# =========
|
||||
# Structure
|
||||
# =========
|
||||
#
|
||||
# The CODEOWNERS file is organised by topic area.
|
||||
# Please add new entries in alphabetical order within the relevant section.
|
||||
# Where possible, keep related files together. For example, documentation,
|
||||
# code, and tests for a given item should all be listed in the same place.
|
||||
#
|
||||
# GitHub usernames should be aligned to column 31, or the next multiple
|
||||
# of three if the relevant paths are too long to fit.
|
||||
#
|
||||
# Top-level sections are:
|
||||
#
|
||||
# * Buildbots, Continuous Integration, and Testing
|
||||
# project-wide configuration files, internal tools for use in CI,
|
||||
# linting.
|
||||
# * Build System
|
||||
# the Makefile, autoconf, and other autotools files.
|
||||
# * Documentation
|
||||
# broader sections of documentation, documentation tools
|
||||
# * Internal Tools & Data
|
||||
# internal tools, integration with external systems,
|
||||
# entries that don't fit elsewhere
|
||||
# * Platform Support
|
||||
# relating to support for specific platforms
|
||||
# * Interpreter Core
|
||||
# the grammar, parser, compiler, interpreter, etc.
|
||||
# * Standard Library
|
||||
# standard library modules (from both Lib and Modules)
|
||||
# and related files (such as their tests and docs)
|
||||
# * Exclusions
|
||||
# exclusions from .github/CODEOWNERS should go at the very end
|
||||
# because the final matching pattern will take precedence.
|
||||
|
||||
# pre-commit
|
||||
.pre-commit-config.yaml @hugovk @AlexWaygood
|
||||
.ruff.toml @hugovk @AlexWaygood
|
||||
# ----------------------------------------------------------------------------
|
||||
# Buildbots, Continuous Integration, and Testing
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Build system
|
||||
configure* @erlend-aasland @corona10
|
||||
# Azure Pipelines
|
||||
.azure-pipelines/ @AA-Turner
|
||||
|
||||
# asyncio
|
||||
**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 @willingc
|
||||
# GitHub & related scripts
|
||||
.github/ @ezio-melotti @hugovk @AA-Turner
|
||||
Tools/build/compute-changes.py @AA-Turner
|
||||
Tools/build/verify_ensurepip_wheels.py @AA-Turner @pfmoore @pradyunsg
|
||||
|
||||
# Core
|
||||
**/*context* @1st1
|
||||
**/*genobject* @markshannon
|
||||
**/*hamt* @1st1
|
||||
**/*jit* @brandtbucher
|
||||
Objects/set* @rhettinger
|
||||
Objects/dict* @methane @markshannon
|
||||
Objects/typevarobject.c @JelleZijlstra
|
||||
Objects/type* @markshannon
|
||||
Objects/codeobject.c @markshannon
|
||||
Objects/frameobject.c @markshannon
|
||||
Objects/call.c @markshannon
|
||||
Python/ceval*.c @markshannon @gvanrossum
|
||||
Python/ceval*.h @markshannon @gvanrossum
|
||||
Python/compile.c @markshannon @iritkatriel
|
||||
Python/assemble.c @markshannon @iritkatriel
|
||||
Python/flowgraph.c @markshannon @iritkatriel
|
||||
Python/ast_opt.c @isidentical
|
||||
Python/bytecodes.c @markshannon @gvanrossum
|
||||
Python/optimizer*.c @markshannon @gvanrossum
|
||||
Python/optimizer_analysis.c @Fidget-Spinner
|
||||
Python/optimizer_bytecodes.c @Fidget-Spinner
|
||||
Lib/test/test_patma.py @brandtbucher
|
||||
Lib/test/test_type_*.py @JelleZijlstra
|
||||
Lib/test/test_capi/test_misc.py @markshannon @gvanrossum
|
||||
# Pre-commit
|
||||
.pre-commit-config.yaml @hugovk
|
||||
.ruff.toml @hugovk @AlexWaygood @AA-Turner
|
||||
|
||||
# Patchcheck
|
||||
Tools/patchcheck/ @AA-Turner
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Build System
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Autotools
|
||||
configure* @erlend-aasland @corona10 @AA-Turner @emmatyping
|
||||
Makefile.pre.in @erlend-aasland @AA-Turner @emmatyping
|
||||
Modules/makesetup @erlend-aasland @AA-Turner @emmatyping
|
||||
Modules/Setup* @erlend-aasland @AA-Turner @emmatyping
|
||||
Tools/build/regen-configure.sh @AA-Turner
|
||||
|
||||
# generate-build-details
|
||||
Tools/build/generate-build-details.py @FFY00
|
||||
Lib/test/test_build_details.py @FFY00
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Documentation
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Internal Docs
|
||||
InternalDocs/ @AA-Turner
|
||||
|
||||
# Tools, Configuration, etc
|
||||
Doc/Makefile @AA-Turner @hugovk
|
||||
Doc/_static/ @AA-Turner @hugovk
|
||||
Doc/conf.py @AA-Turner @hugovk
|
||||
Doc/make.bat @AA-Turner @hugovk
|
||||
Doc/requirements.txt @AA-Turner @hugovk
|
||||
Doc/tools/ @AA-Turner @hugovk
|
||||
|
||||
# PR Previews
|
||||
.readthedocs.yml @AA-Turner
|
||||
|
||||
# Sections
|
||||
Doc/reference/ @willingc @AA-Turner
|
||||
Doc/whatsnew/ @AA-Turner
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Internal Tools and Data
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Argument Clinic
|
||||
Tools/clinic/ @erlend-aasland @AA-Turner
|
||||
Lib/test/test_clinic.py @erlend-aasland @AA-Turner
|
||||
Doc/howto/clinic.rst @erlend-aasland @AA-Turner
|
||||
|
||||
# C Analyser
|
||||
Tools/c-analyzer/ @ericsnowcurrently
|
||||
|
||||
# dbm
|
||||
**/*dbm* @corona10 @erlend-aasland @serhiy-storchaka
|
||||
# Fuzzing
|
||||
Modules/_xxtestfuzz/ @ammaraskar
|
||||
|
||||
# runtime state/lifecycle
|
||||
**/*pylifecycle* @ericsnowcurrently
|
||||
**/*pystate* @ericsnowcurrently
|
||||
**/*preconfig* @ericsnowcurrently
|
||||
**/*initconfig* @ericsnowcurrently
|
||||
**/*pathconfig* @ericsnowcurrently
|
||||
**/*sysmodule* @ericsnowcurrently
|
||||
# Limited C API & Stable ABI
|
||||
Doc/c-api/stable.rst @encukou
|
||||
Doc/data/*.abi @encukou
|
||||
Misc/stable_abi.toml @encukou
|
||||
Tools/build/stable_abi.py @encukou
|
||||
|
||||
# SBOM
|
||||
Misc/externals.spdx.json @sethmlarson
|
||||
Misc/sbom.spdx.json @sethmlarson
|
||||
Tools/build/generate_sbom.py @sethmlarson
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Platform Support
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Android
|
||||
Android/ @mhsmith @freakboy3742
|
||||
Doc/using/android.rst @mhsmith @freakboy3742
|
||||
Lib/_android_support.py @mhsmith @freakboy3742
|
||||
Lib/test/test_android.py @mhsmith @freakboy3742
|
||||
|
||||
# iOS
|
||||
Doc/using/ios.rst @freakboy3742
|
||||
Lib/_ios_support.py @freakboy3742
|
||||
Apple/ @freakboy3742
|
||||
iOS/ @freakboy3742
|
||||
|
||||
# macOS
|
||||
Mac/ @python/macos-team
|
||||
Lib/_osx_support.py @python/macos-team
|
||||
Lib/test/test__osx_support.py @python/macos-team
|
||||
|
||||
# WebAssembly
|
||||
Tools/wasm/README.md @brettcannon @freakboy3742 @emmatyping
|
||||
|
||||
# WebAssembly (Emscripten)
|
||||
Tools/wasm/config.site-wasm32-emscripten @freakboy3742 @emmatyping
|
||||
Tools/wasm/emscripten @freakboy3742 @emmatyping
|
||||
|
||||
# WebAssembly (WASI)
|
||||
Tools/wasm/wasi-env @brettcannon @emmatyping
|
||||
Tools/wasm/wasi.py @brettcannon @emmatyping
|
||||
Tools/wasm/wasi @brettcannon @emmatyping
|
||||
|
||||
# Windows
|
||||
PC/ @python/windows-team
|
||||
PCbuild/ @python/windows-team
|
||||
|
||||
# Windows installer packages
|
||||
Tools/msi/ @python/windows-team
|
||||
Tools/nuget/ @python/windows-team
|
||||
|
||||
# Windows Launcher
|
||||
PC/launcher.c @python/windows-team @vsajip
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Interpreter Core
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# AST
|
||||
Lib/_ast_unparse.py @isidentical @JelleZijlstra @eclips4 @tomasr8
|
||||
Lib/ast.py @isidentical @JelleZijlstra @eclips4 @tomasr8
|
||||
Lib/test/test_ast/ @eclips4 @tomasr8
|
||||
Parser/asdl.py @isidentical @JelleZijlstra @eclips4 @tomasr8
|
||||
Parser/asdl_c.py @isidentical @JelleZijlstra @eclips4 @tomasr8
|
||||
Python/ast.c @isidentical @JelleZijlstra @eclips4 @tomasr8
|
||||
Python/ast_preprocess.c @isidentical @eclips4 @tomasr8
|
||||
|
||||
# Built-in types
|
||||
Objects/call.c @markshannon
|
||||
Objects/codeobject.c @markshannon
|
||||
Objects/dict* @methane @markshannon
|
||||
Objects/frameobject.c @markshannon
|
||||
**/*genobject* @markshannon
|
||||
Objects/object.c @ZeroIntensity
|
||||
Objects/set* @rhettinger
|
||||
Objects/type* @markshannon
|
||||
Objects/typevarobject.c @JelleZijlstra
|
||||
Objects/unionobject.c @JelleZijlstra
|
||||
|
||||
# Byte code interpreter ('the eval loop')
|
||||
Python/bytecodes.c @markshannon
|
||||
Python/ceval* @markshannon
|
||||
Tools/cases_generator/ @markshannon
|
||||
|
||||
# Compiler (AST to byte code)
|
||||
Python/assemble.c @markshannon @iritkatriel
|
||||
Python/codegen.c @markshannon @iritkatriel
|
||||
Python/compile.c @markshannon @iritkatriel
|
||||
Python/flowgraph.c @markshannon @iritkatriel
|
||||
Python/instruction_sequence.c @iritkatriel
|
||||
Python/symtable.c @JelleZijlstra @carljm
|
||||
|
||||
# Context variables & HAMT
|
||||
**/contextvars* @1st1
|
||||
**/*hamt* @1st1
|
||||
Include/cpython/context.h @1st1
|
||||
Include/internal/pycore_context.h @1st1
|
||||
Lib/test/test_context.py @1st1
|
||||
Python/context.c @1st1
|
||||
|
||||
# Core Modules
|
||||
**/*bltinmodule* @ericsnowcurrently
|
||||
**/*gil* @ericsnowcurrently
|
||||
Include/internal/pycore_runtime.h @ericsnowcurrently
|
||||
Include/internal/pycore_interp.h @ericsnowcurrently
|
||||
Include/internal/pycore_tstate.h @ericsnowcurrently
|
||||
Include/internal/pycore_*_state.h @ericsnowcurrently
|
||||
Include/internal/pycore_*_init.h @ericsnowcurrently
|
||||
Include/internal/pycore_atexit.h @ericsnowcurrently
|
||||
Include/internal/pycore_freelist.h @ericsnowcurrently
|
||||
Include/internal/pycore_global_objects.h @ericsnowcurrently
|
||||
Include/internal/pycore_obmalloc.h @ericsnowcurrently
|
||||
Include/internal/pycore_pymem.h @ericsnowcurrently
|
||||
Modules/main.c @ericsnowcurrently
|
||||
Programs/_bootstrap_python.c @ericsnowcurrently
|
||||
Programs/python.c @ericsnowcurrently
|
||||
Tools/build/generate_global_objects.py @ericsnowcurrently
|
||||
**/*sysmodule* @ericsnowcurrently
|
||||
|
||||
# Exceptions
|
||||
Lib/traceback.py @iritkatriel
|
||||
Lib/test/test_except*.py @iritkatriel
|
||||
Lib/test/test_traceback.py @iritkatriel
|
||||
Objects/exceptions.c @iritkatriel
|
||||
Python/traceback.c @iritkatriel
|
||||
|
||||
# Hashing
|
||||
**/*hashlib* @gpshead @tiran
|
||||
**/*pyhash* @gpshead @tiran
|
||||
**/sha* @gpshead @tiran
|
||||
Modules/md5* @gpshead @tiran
|
||||
**/*blake* @gpshead @tiran
|
||||
Modules/_blake2/** @gpshead @tiran
|
||||
Modules/_hacl/** @gpshead
|
||||
# Getpath
|
||||
Lib/test/test_getpath.py @FFY00
|
||||
Modules/getpath* @FFY00
|
||||
|
||||
# logging
|
||||
**/*logging* @vsajip
|
||||
# Hashing / ``hash()`` and related
|
||||
Include/cpython/pyhash.h @gpshead @picnixz
|
||||
Include/internal/pycore_pyhash.h @gpshead @picnixz
|
||||
Include/pyhash.h @gpshead @picnixz
|
||||
Python/pyhash.c @gpshead @picnixz
|
||||
|
||||
# venv
|
||||
**/*venv* @vsajip
|
||||
|
||||
# Launcher
|
||||
/PC/launcher.c @vsajip
|
||||
|
||||
# HTML
|
||||
/Lib/html/ @ezio-melotti
|
||||
/Lib/_markupbase.py @ezio-melotti
|
||||
/Lib/test/test_html*.py @ezio-melotti
|
||||
/Tools/build/parse_html5_entities.py @ezio-melotti
|
||||
|
||||
# Import (including importlib).
|
||||
# The import system (including importlib)
|
||||
**/*import* @brettcannon @ericsnowcurrently @ncoghlan @warsaw
|
||||
/Python/import.c @kumaraditya303
|
||||
Python/dynload_*.c @ericsnowcurrently
|
||||
Python/import.c @brettcannon @ericsnowcurrently @ncoghlan @warsaw @kumaraditya303
|
||||
**/*freeze* @ericsnowcurrently
|
||||
**/*frozen* @ericsnowcurrently
|
||||
**/*modsupport* @ericsnowcurrently
|
||||
|
|
@@ -116,19 +263,171 @@ Python/dynload_*.c @ericsnowcurrently
|
|||
**/*pythonrun* @ericsnowcurrently
|
||||
**/*runpy* @ericsnowcurrently
|
||||
**/*singlephase* @ericsnowcurrently
|
||||
Lib/test/test_module/ @ericsnowcurrently
|
||||
Doc/c-api/module.rst @ericsnowcurrently
|
||||
**/*importlib/resources/* @jaraco @warsaw @FFY00
|
||||
**/*importlib/metadata/* @jaraco @warsaw
|
||||
Lib/test/test_module/ @ericsnowcurrently
|
||||
Python/dynload_*.c @ericsnowcurrently
|
||||
|
||||
# Initialisation
|
||||
**/*initconfig* @ericsnowcurrently
|
||||
**/*pathconfig* @ericsnowcurrently
|
||||
**/*preconfig* @ericsnowcurrently
|
||||
Doc/library/sys_path_init.rst @FFY00
|
||||
Doc/c-api/init_config.rst @FFY00
|
||||
|
||||
# Interpreter main program
|
||||
Modules/main.c @ericsnowcurrently
|
||||
Programs/_bootstrap_python.c @ericsnowcurrently
|
||||
Programs/python.c @ericsnowcurrently
|
||||
|
||||
# JIT
|
||||
Include/internal/pycore_jit.h @brandtbucher @savannahostrowski @diegorusso
|
||||
Python/jit.c @brandtbucher @savannahostrowski @diegorusso
|
||||
Tools/jit/ @brandtbucher @savannahostrowski @diegorusso
|
||||
InternalDocs/jit.md @brandtbucher @savannahostrowski @diegorusso @AA-Turner
|
||||
|
||||
# Micro-op / μop / Tier 2 Optimiser
|
||||
Python/optimizer.c @markshannon
|
||||
Python/optimizer_analysis.c @markshannon @tomasr8 @Fidget-Spinner
|
||||
Python/optimizer_bytecodes.c @markshannon @tomasr8 @Fidget-Spinner
|
||||
Python/optimizer_symbols.c @markshannon @tomasr8
|
||||
|
||||
# Parser, Lexer, and Grammar
|
||||
Grammar/python.gram @pablogsal @lysnikolaou
|
||||
Lib/test/test_peg_generator/ @pablogsal @lysnikolaou
|
||||
Lib/test/test_tokenize.py @pablogsal @lysnikolaou
|
||||
Lib/tokenize.py @pablogsal @lysnikolaou
|
||||
Parser/ @pablogsal @lysnikolaou
|
||||
Tools/peg_generator/ @pablogsal @lysnikolaou
|
||||
|
||||
# Runtime state/lifecycle
|
||||
**/*gil* @ericsnowcurrently
|
||||
**/*pylifecycle* @ericsnowcurrently @ZeroIntensity
|
||||
**/*pystate* @ericsnowcurrently @ZeroIntensity
|
||||
Include/internal/pycore_*_init.h @ericsnowcurrently
|
||||
Include/internal/pycore_*_state.h @ericsnowcurrently
|
||||
Include/internal/pycore_atexit.h @ericsnowcurrently
|
||||
Include/internal/pycore_freelist.h @ericsnowcurrently
|
||||
Include/internal/pycore_global_objects.h @ericsnowcurrently
|
||||
Include/internal/pycore_interp.h @ericsnowcurrently
|
||||
Include/internal/pycore_obmalloc.h @ericsnowcurrently
|
||||
Include/internal/pycore_pymem.h @ericsnowcurrently
|
||||
Include/internal/pycore_runtime.h @ericsnowcurrently
|
||||
Include/internal/pycore_stackref.h @Fidget-Spinner
|
||||
Include/internal/pycore_tstate.h @ericsnowcurrently
|
||||
Tools/build/generate_global_objects.py @ericsnowcurrently
|
||||
|
||||
# Remote Debugging
|
||||
Python/remote_debug.h @pablogsal
|
||||
Python/remote_debugging.c @pablogsal
|
||||
Modules/_remote_debugging_module.c @pablogsal @ambv @1st1
|
||||
|
||||
# Sub-Interpreters
|
||||
**/*crossinterp* @ericsnowcurrently
|
||||
**/*interpreteridobject.* @ericsnowcurrently
|
||||
Doc/library/concurrent.interpreters.rst @ericsnowcurrently
|
||||
Lib/concurrent/futures/interpreter.py @ericsnowcurrently
|
||||
Lib/concurrent/interpreters/ @ericsnowcurrently
|
||||
Lib/test/support/channels.py @ericsnowcurrently
|
||||
Lib/test/test__interp*.py @ericsnowcurrently
|
||||
Lib/test/test_interpreters/ @ericsnowcurrently
|
||||
Modules/_interp*module.c @ericsnowcurrently
|
||||
|
||||
# Template string literals (t-strings)
|
||||
Lib/test/test_tstring.py @lysnikolaou
|
||||
Objects/interpolationobject.c @lysnikolaou
|
||||
Objects/templateobject.c @lysnikolaou
|
||||
|
||||
# Tests
|
||||
Lib/test/test_patma.py @brandtbucher
|
||||
Lib/test/test_type_*.py @JelleZijlstra
|
||||
Lib/test/test_capi/test_misc.py @markshannon
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Standard Library
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Annotationlib
|
||||
Doc/library/annotationlib.rst @JelleZijlstra
|
||||
Lib/annotationlib.py @JelleZijlstra
|
||||
Lib/test/test_annotationlib.py @JelleZijlstra
|
||||
|
||||
# Argparse
|
||||
Doc/**/argparse*.rst @savannahostrowski
|
||||
Lib/argparse.py @savannahostrowski
|
||||
Lib/test/test_argparse.py @savannahostrowski
|
||||
|
||||
# Asyncio
|
||||
Doc/library/asyncio*.rst @1st1 @asvetlov @kumaraditya303 @willingc
|
||||
InternalDocs/asyncio.md @1st1 @asvetlov @kumaraditya303 @willingc @AA-Turner
|
||||
Lib/asyncio/ @1st1 @asvetlov @kumaraditya303 @willingc
|
||||
Lib/test/test_asyncio/ @1st1 @asvetlov @kumaraditya303 @willingc
|
||||
Modules/_asynciomodule.c @1st1 @asvetlov @kumaraditya303 @willingc
|
||||
|
||||
# Bisect
|
||||
Doc/library/bisect.rst @rhettinger
|
||||
Lib/bisect.py @rhettinger
|
||||
Lib/test/test_bisect.py @rhettinger
|
||||
Modules/_bisectmodule.c @rhettinger
|
||||
|
||||
# Calendar
|
||||
Lib/calendar.py @AA-Turner
|
||||
Lib/test/test_calendar.py @AA-Turner
|
||||
|
||||
# Cryptographic Primitives and Applications
|
||||
**/*hashlib* @gpshead @picnixz
|
||||
**/*hashopenssl* @gpshead @picnixz
|
||||
**/*hmac* @gpshead @picnixz
|
||||
**/*ssl* @gpshead @picnixz
|
||||
Modules/_hacl/ @gpshead @picnixz
|
||||
Modules/*blake* @gpshead @picnixz
|
||||
Modules/*md5* @gpshead @picnixz
|
||||
Modules/*sha* @gpshead @picnixz
|
||||
|
||||
# Codecs
|
||||
Modules/cjkcodecs/ @corona10
|
||||
Tools/unicode/gencjkcodecs.py @corona10
|
||||
|
||||
# Collections
|
||||
Doc/library/collections.abc.rst @rhettinger
|
||||
Doc/library/collections.rst @rhettinger
|
||||
Lib/_collections_abc.py @rhettinger
|
||||
Lib/collections/ @rhettinger
|
||||
Lib/test/test_collections.py @rhettinger
|
||||
Modules/_collectionsmodule.c @rhettinger
|
||||
|
||||
# Colorize
|
||||
Lib/_colorize.py @hugovk
|
||||
Lib/test/test__colorize.py @hugovk
|
||||
|
||||
# Config Parser
|
||||
Lib/configparser.py @jaraco
|
||||
Lib/test/test_configparser.py @jaraco
|
||||
|
||||
# Dataclasses
|
||||
Doc/library/dataclasses.rst @ericvsmith
|
||||
Lib/dataclasses.py @ericvsmith
|
||||
Lib/test/test_dataclasses/ @ericvsmith
|
||||
|
||||
# Dates and times
|
||||
**/*datetime* @pganssle @abalkin
|
||||
**/*str*time* @pganssle @abalkin
|
||||
Doc/library/time.rst @pganssle @abalkin
|
||||
Lib/test/test_time.py @pganssle @abalkin
|
||||
Modules/timemodule.c @pganssle @abalkin
|
||||
Python/pytime.c @pganssle @abalkin
|
||||
Include/internal/pycore_time.h @pganssle @abalkin
|
||||
Doc/**/*time.rst @pganssle @abalkin
|
||||
Doc/library/zoneinfo.rst @pganssle
|
||||
Include/datetime.h @pganssle @abalkin
|
||||
Include/internal/pycore_time.h @pganssle @abalkin
|
||||
Lib/test/test_zoneinfo/ @pganssle
|
||||
Lib/zoneinfo/ @pganssle
|
||||
Lib/*time.py @pganssle @abalkin
|
||||
Lib/test/datetimetester.py @pganssle @abalkin
|
||||
Lib/test/test_*time.py @pganssle @abalkin
|
||||
Modules/*zoneinfo* @pganssle
|
||||
Modules/*time* @pganssle @abalkin
|
||||
Python/pytime.c @pganssle @abalkin
|
||||
|
||||
# Dbm
|
||||
Doc/library/dbm.rst @corona10 @erlend-aasland @serhiy-storchaka
|
||||
Lib/dbm/ @corona10 @erlend-aasland @serhiy-storchaka
|
||||
Lib/test/test_dbm*.py @corona10 @erlend-aasland @serhiy-storchaka
|
||||
Modules/*dbm* @corona10 @erlend-aasland @serhiy-storchaka
|
||||
|
||||
# Email and related
|
||||
**/*mail* @python/email-team
|
||||
|
|
@@ -137,120 +436,197 @@ Include/internal/pycore_time.h @pganssle @abalkin
|
|||
**/*imap* @python/email-team
|
||||
**/*poplib* @python/email-team
|
||||
|
||||
# Ensurepip
|
||||
Doc/library/ensurepip.rst @pfmoore @pradyunsg
|
||||
Lib/ensurepip/ @pfmoore @pradyunsg
|
||||
Lib/test/test_ensurepip.py @pfmoore @pradyunsg
|
||||
|
||||
# Enum
|
||||
Doc/howto/enum.rst @ethanfurman
|
||||
Doc/library/enum.rst @ethanfurman
|
||||
Lib/enum.py @ethanfurman
|
||||
Lib/test/test_enum.py @ethanfurman
|
||||
Lib/test/test_json/test_enum.py @ethanfurman
|
||||
|
||||
# FTP
|
||||
Doc/library/ftplib.rst @giampaolo
|
||||
Lib/ftplib.py @giampaolo
|
||||
Lib/test/test_ftplib.py @giampaolo
|
||||
|
||||
# Functools
|
||||
Doc/library/functools.rst @rhettinger
|
||||
Lib/functools.py @rhettinger
|
||||
Lib/test/test_functools.py @rhettinger
|
||||
Modules/_functoolsmodule.c @rhettinger
|
||||
|
||||
# Garbage collector
|
||||
/Modules/gcmodule.c @pablogsal
|
||||
/Doc/library/gc.rst @pablogsal
|
||||
Modules/gcmodule.c @pablogsal
|
||||
Doc/library/gc.rst @pablogsal
|
||||
|
||||
# Parser
|
||||
/Parser/ @pablogsal @lysnikolaou
|
||||
/Tools/peg_generator/ @pablogsal @lysnikolaou
|
||||
/Lib/test/test_peg_generator/ @pablogsal @lysnikolaou
|
||||
/Grammar/python.gram @pablogsal @lysnikolaou
|
||||
/Lib/tokenize.py @pablogsal @lysnikolaou
|
||||
/Lib/test/test_tokenize.py @pablogsal @lysnikolaou
|
||||
# Gettext
|
||||
Doc/library/gettext.rst @tomasr8
|
||||
Lib/gettext.py @tomasr8
|
||||
Lib/test/test_gettext.py @tomasr8
|
||||
Tools/i18n/pygettext.py @tomasr8
|
||||
|
||||
# Code generator
|
||||
/Tools/cases_generator/ @gvanrossum
|
||||
# Heapq
|
||||
Doc/library/heapq* @rhettinger
|
||||
Lib/heapq.py @rhettinger
|
||||
Lib/test/test_heapq.py @rhettinger
|
||||
Modules/_heapqmodule.c @rhettinger
|
||||
|
||||
# AST
|
||||
Python/ast.c @isidentical
|
||||
Parser/asdl.py @isidentical
|
||||
Parser/asdl_c.py @isidentical
|
||||
Lib/ast.py @isidentical
|
||||
# HTML
|
||||
Doc/library/html* @ezio-melotti
|
||||
Lib/html/ @ezio-melotti
|
||||
Lib/_markupbase.py @ezio-melotti
|
||||
Lib/test/test_html*.py @ezio-melotti
|
||||
Tools/build/parse_html5_entities.py @ezio-melotti
|
||||
|
||||
# Mock
|
||||
/Lib/unittest/mock.py @cjw296
|
||||
/Lib/test/test_unittest/testmock/* @cjw296
|
||||
# IDLE
|
||||
Doc/library/idle.rst @terryjreedy
|
||||
Lib/idlelib/ @terryjreedy
|
||||
Lib/turtledemo/ @terryjreedy
|
||||
|
||||
# multiprocessing
|
||||
**/*multiprocessing* @gpshead
|
||||
# importlib.metadata
|
||||
Doc/library/importlib.metadata.rst @jaraco @warsaw
|
||||
Lib/importlib/metadata/ @jaraco @warsaw
|
||||
Lib/test/test_importlib/metadata/ @jaraco @warsaw
|
||||
|
||||
# importlib.resources
|
||||
Doc/library/importlib.resources.abc.rst @jaraco @warsaw
|
||||
Doc/library/importlib.resources.rst @jaraco @warsaw
|
||||
Lib/importlib/resources/ @jaraco @warsaw @FFY00
|
||||
Lib/test/test_importlib/resources/ @jaraco @warsaw @FFY00
|
||||
|
||||
# Itertools
|
||||
Doc/library/itertools.rst @rhettinger
|
||||
Lib/test/test_itertools.py @rhettinger
|
||||
Modules/itertoolsmodule.c @rhettinger
|
||||
|
||||
# Logging
|
||||
Doc/**/logging* @vsajip
|
||||
Lib/logging/ @vsajip
|
||||
Lib/test/test_logging.py @vsajip
|
||||
|
||||
# Multiprocessing
|
||||
Doc/library/multiprocessing*.rst @gpshead
|
||||
Lib/multiprocessing/ @gpshead
|
||||
Lib/test/*multiprocessing.py @gpshead
|
||||
Lib/test/test_multiprocessing*/ @gpshead
|
||||
Modules/_multiprocessing/ @gpshead
|
||||
|
||||
# Pathlib
|
||||
Doc/library/pathlib.rst @barneygale
|
||||
Lib/pathlib/ @barneygale
|
||||
Lib/test/test_pathlib/ @barneygale
|
||||
|
||||
# Pdb & Bdb
|
||||
Doc/library/bdb.rst @gaogaotiantian
|
||||
Doc/library/pdb.rst @gaogaotiantian
|
||||
Lib/bdb.py @gaogaotiantian
|
||||
Lib/pdb.py @gaogaotiantian
|
||||
Lib/test/test_bdb.py @gaogaotiantian
|
||||
Lib/test/test_pdb.py @gaogaotiantian
|
||||
Lib/test/test_remote_pdb.py @gaogaotiantian
|
||||
|
||||
# Pydoc
|
||||
Lib/pydoc.py @AA-Turner
|
||||
Lib/pydoc_data/ @AA-Turner
|
||||
Lib/test/test_pydoc/ @AA-Turner
|
||||
|
||||
# PyREPL
|
||||
Lib/_pyrepl/ @pablogsal @lysnikolaou @ambv
|
||||
Lib/test/test_pyrepl/ @pablogsal @lysnikolaou @ambv
|
||||
|
||||
# Random
|
||||
Doc/library/random.rst @rhettinger
|
||||
Lib/random.py @rhettinger
|
||||
Lib/test/test_random.py @rhettinger
|
||||
Modules/_randommodule.c @rhettinger
|
||||
|
||||
# Shutil
|
||||
Doc/library/shutil.rst @giampaolo
|
||||
Lib/shutil.py @giampaolo
|
||||
Lib/test/test_shutil.py @giampaolo
|
||||
|
||||
# Site
|
||||
Lib/site.py @FFY00
|
||||
Lib/test/test_site.py @FFY00
|
||||
Doc/library/site.rst @FFY00
|
||||
|
||||
# string.templatelib
|
||||
Doc/library/string.templatelib.rst @lysnikolaou @AA-Turner
|
||||
Lib/string/templatelib.py @lysnikolaou @AA-Turner
|
||||
Lib/test/test_string/test_templatelib.py @lysnikolaou @AA-Turner
|
||||
|
||||
# Sysconfig
|
||||
**/*sysconfig* @FFY00
|
||||
|
||||
# SQLite 3
|
||||
**/*sqlite* @berkerpeksag @erlend-aasland
|
||||
Doc/library/sqlite3.rst @berkerpeksag @erlend-aasland
|
||||
Lib/sqlite3/ @berkerpeksag @erlend-aasland
|
||||
Lib/test/test_sqlite3/ @berkerpeksag @erlend-aasland
|
||||
Modules/_sqlite/ @berkerpeksag @erlend-aasland
|
||||
|
||||
# subprocess
|
||||
/Lib/subprocess.py @gpshead
|
||||
/Lib/test/test_subprocess.py @gpshead
|
||||
/Modules/*subprocess* @gpshead
|
||||
# Subprocess
|
||||
Lib/subprocess.py @gpshead
|
||||
Lib/test/test_subprocess.py @gpshead
|
||||
Modules/*subprocess* @gpshead
|
||||
|
||||
# Limited C API & stable ABI
|
||||
Tools/build/stable_abi.py @encukou
|
||||
Misc/stable_abi.toml @encukou
|
||||
Doc/data/*.abi @encukou
|
||||
Doc/c-api/stable.rst @encukou
|
||||
# Tarfile
|
||||
Doc/library/tarfile.rst @ethanfurman
|
||||
Lib/tarfile.py @ethanfurman
|
||||
Lib/test/test_tarfile.py @ethanfurman
|
||||
|
||||
# Windows
|
||||
/PC/ @python/windows-team
|
||||
/PCbuild/ @python/windows-team
|
||||
# TOML
|
||||
Doc/library/tomllib.rst @encukou @hauntsaninja
|
||||
Lib/test/test_tomllib/ @encukou @hauntsaninja
|
||||
Lib/tomllib/ @encukou @hauntsaninja
|
||||
|
||||
# Typing
|
||||
Doc/library/typing.rst @JelleZijlstra @AlexWaygood
|
||||
Lib/test/test_typing.py @JelleZijlstra @AlexWaygood
|
||||
Lib/test/typinganndata/ @JelleZijlstra @AlexWaygood
|
||||
Lib/typing.py @JelleZijlstra @AlexWaygood
|
||||
Modules/_typingmodule.c @JelleZijlstra @AlexWaygood
|
||||
|
||||
# Types
|
||||
Lib/test/test_types.py @AA-Turner
|
||||
Lib/types.py @AA-Turner
|
||||
Modules/_typesmodule.c @AA-Turner
|
||||
|
||||
# Unittest
|
||||
Lib/unittest/mock.py @cjw296
|
||||
Lib/test/test_unittest/testmock/ @cjw296
|
||||
|
||||
# Urllib
|
||||
**/*robotparser* @berkerpeksag
|
||||
|
||||
# Windows installer packages
|
||||
/Tools/msi/ @python/windows-team
|
||||
/Tools/nuget/ @python/windows-team
|
||||
# Venv
|
||||
**/*venv* @vsajip @FFY00
|
||||
|
||||
# Misc
|
||||
**/*itertools* @rhettinger
|
||||
**/*collections* @rhettinger
|
||||
**/*random* @rhettinger
|
||||
**/*queue* @rhettinger
|
||||
**/*bisect* @rhettinger
|
||||
**/*heapq* @rhettinger
|
||||
**/*functools* @rhettinger
|
||||
**/*decimal* @rhettinger
|
||||
# Weakref
|
||||
**/*weakref* @kumaraditya303
|
||||
|
||||
**/*dataclasses* @ericvsmith
|
||||
# Zipfile.Path
|
||||
Lib/test/test_zipfile/_path/ @jaraco
|
||||
Lib/zipfile/_path/ @jaraco
|
||||
|
||||
**/*ensurepip* @pfmoore @pradyunsg
|
||||
# Zstandard
|
||||
Lib/compression/zstd/ @AA-Turner @emmatyping
|
||||
Lib/test/test_zstd.py @AA-Turner @emmatyping
|
||||
Modules/_zstd/ @AA-Turner @emmatyping
|
||||
|
||||
**/*idlelib* @terryjreedy
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
**/*typing* @JelleZijlstra @AlexWaygood
|
||||
# Exclusions from .github/CODEOWNERS should go at the very end
|
||||
# because the final matching pattern will take precedence.
|
||||
|
||||
**/*ftplib @giampaolo
|
||||
**/*shutil @giampaolo
|
||||
# Exclude .mailmap from being owned by @python/email-team
|
||||
.mailmap
|
||||
|
||||
**/*enum* @ethanfurman
|
||||
**/*cgi* @ethanfurman
|
||||
**/*tarfile* @ethanfurman
|
||||
|
||||
**/*tomllib* @encukou @hauntsaninja
|
||||
|
||||
**/*sysconfig* @FFY00
|
||||
|
||||
**/*cjkcodecs* @corona10
|
||||
|
||||
# macOS
|
||||
/Mac/ @python/macos-team
|
||||
**/*osx_support* @python/macos-team
|
||||
|
||||
# pathlib
|
||||
**/*pathlib* @barneygale
|
||||
|
||||
# zipfile.Path
|
||||
**/*zipfile/_path/* @jaraco
|
||||
|
||||
# Argument Clinic
|
||||
/Tools/clinic/** @erlend-aasland
|
||||
/Lib/test/test_clinic.py @erlend-aasland
|
||||
Doc/howto/clinic.rst @erlend-aasland
|
||||
|
||||
# Subinterpreters
|
||||
**/*interpreteridobject.* @ericsnowcurrently
|
||||
**/*crossinterp* @ericsnowcurrently
|
||||
Lib/test/support/interpreters/ @ericsnowcurrently
|
||||
Modules/_xx*interp*module.c @ericsnowcurrently
|
||||
Lib/test/test_interpreters/ @ericsnowcurrently
|
||||
|
||||
# WebAssembly
|
||||
/Tools/wasm/ @brettcannon
|
||||
|
||||
# SBOM
|
||||
/Misc/externals.spdx.json @sethmlarson
|
||||
/Misc/sbom.spdx.json @sethmlarson
|
||||
/Tools/build/generate_sbom.py @sethmlarson
|
||||
|
||||
# Config Parser
|
||||
Lib/configparser.py @jaraco
|
||||
Lib/test/test_configparser.py @jaraco
|
||||
# Exclude Argument Clinic directories
|
||||
Modules/**/clinic/
|
||||
Objects/**/clinic/
|
||||
PC/**/clinic/
|
||||
Python/**/clinic/
|
||||
|
|
|
|||
.github/CONTRIBUTING.rst (vendored): 13 changes

@@ -4,7 +4,7 @@ Contributing to Python
Build Status
------------

- `Buildbot status overview <https://buildbot.python.org/all/#/release_status>`_
- `Buildbot status overview <https://buildbot.python.org/#/release_status>`_

- `GitHub Actions status <https://github.com/python/cpython/actions/workflows/build.yml>`_

@@ -34,17 +34,18 @@ our workflow that are not covered by a bot or status check are:
- All discussions that are not directly related to the code in the pull request
  should happen on `GitHub Issues <https://github.com/python/cpython/issues>`_.
- Upon your first non-trivial pull request (which includes documentation changes),
  feel free to add yourself to ``Misc/ACKS``
  feel free to add yourself to ``Misc/ACKS``.


Setting Expectations
--------------------
Due to the fact that this project is entirely volunteer-run (i.e. no one is paid
to work on Python full-time), we unfortunately can make no guarantees as to if
Due to the fact that this project is run by volunteers,
unfortunately we cannot make any guarantees as to if
or when a core developer will get around to reviewing your pull request.
If no core developer has done a review or responded to changes made because of a
"changes requested" review, please feel free to email python-dev to ask if
someone could take a look at your pull request.
"changes requested" review within a month, you can ask for someone to
review your pull request via a post in the `Core Development Discourse
category <https://discuss.python.org/c/core-dev/23>`__.


Code of Conduct

.github/ISSUE_TEMPLATE/bug.yml (vendored): 3 changes

@@ -34,12 +34,13 @@ body:
      label: "CPython versions tested on:"
      multiple: true
      options:
        - "3.8"
        - "3.9"
        - "3.10"
        - "3.11"
        - "3.12"
        - "3.13"
        - "3.14"
        - "3.15"
        - "CPython main branch"
    validations:
      required: true

.github/ISSUE_TEMPLATE/crash.yml (vendored): 4 changes

@@ -27,11 +27,13 @@ body:
      label: "CPython versions tested on:"
      multiple: true
      options:
        - "3.8"
        - "3.9"
        - "3.10"
        - "3.11"
        - "3.12"
        - "3.13"
        - "3.14"
        - "3.15"
        - "CPython main branch"
    validations:
      required: true

.github/PULL_REQUEST_TEMPLATE.md (vendored): 10 changes

@@ -7,10 +7,10 @@ # Pull Request title
It should be in the following format:

```
gh-NNNNN: Summary of the changes made
gh-NNNNNN: Summary of the changes made
```

Where: gh-NNNNN refers to the GitHub issue number.
Where: gh-NNNNNN refers to the GitHub issue number.

Most PRs will require an issue number. Trivial changes, like fixing a typo, do not need an issue.


@@ -20,11 +20,11 @@ # Backport Pull Request title
please ensure that the PR title is in the following format:

```
[X.Y] <title from the original PR> (GH-NNNN)
[X.Y] <title from the original PR> (GH-NNNNNN)
```

Where: [X.Y] is the branch name, e.g. [3.6].
Where: [X.Y] is the branch name, for example: [3.13].

GH-NNNN refers to the PR number from `main`.
GH-NNNNNN refers to the PR number from `main`.

-->

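For instance, a backport of a hypothetical change might be titled "[3.13] gh-123456: Fix overflow in the spam module (GH-123789)", following the format described above; the issue and PR numbers here are invented purely for illustration.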
.github/actionlint.yaml (vendored, new file): 12 changes

@@ -0,0 +1,12 @@
self-hosted-runner:
  # Pending https://github.com/rhysd/actionlint/issues/533
  # and https://github.com/rhysd/actionlint/issues/571
  labels: ["windows-11-arm", "macos-15-intel"]

config-variables: null

paths:
  .github/workflows/**/*.yml:
    ignore:
      - 1st argument of function call is not assignable
      - SC2(015|038|086|091|097|098|129|155)

.github/workflows/add-issue-header.yml (vendored): 1 change

@@ -18,6 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    timeout-minutes: 5
    steps:
      - uses: actions/github-script@v7
        with:

.github/workflows/build.yml (vendored): 774 changes (file diff suppressed because it is too large)

.github/workflows/build_msi.yml (vendored): 40 changes

@@ -1,40 +0,0 @@
name: TestsMSI

on:
  workflow_dispatch:
  push:
    branches:
      - 'main'
      - '3.*'
    paths:
      - 'Tools/msi/**'
      - '.github/workflows/build_msi.yml'
  pull_request:
    branches:
      - 'main'
      - '3.*'
    paths:
      - 'Tools/msi/**'
      - '.github/workflows/build_msi.yml'

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    name: Windows Installer
    runs-on: windows-latest
    timeout-minutes: 60
    strategy:
      matrix:
        type: [x86, x64, arm64]
    env:
      IncludeFreethreaded: true
    steps:
      - uses: actions/checkout@v4
      - name: Build CPython installer
        run: .\Tools\msi\build.bat --doc -${{ matrix.type }}

.github/workflows/documentation-links.yml (vendored): 7 changes

@@ -10,9 +10,6 @@ on:
      - 'Doc/**'
      - '.github/workflows/doc.yml'

permissions:
  pull-requests: write

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

@@ -20,6 +17,10 @@ concurrency:
jobs:
  documentation-links:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    timeout-minutes: 5

    steps:
      - uses: readthedocs/actions/preview@v1
        with:

.github/workflows/jit.yml (vendored): 171 changes

@@ -5,11 +5,21 @@ on:
|
|||
- '**jit**'
|
||||
- 'Python/bytecodes.c'
|
||||
- 'Python/optimizer*.c'
|
||||
- 'Python/executor_cases.c.h'
|
||||
- 'Python/optimizer_cases.c.h'
|
||||
- '!Python/perf_jit_trampoline.c'
|
||||
- '!**/*.md'
|
||||
- '!**/*.ini'
|
||||
push:
|
||||
paths:
|
||||
- '**jit**'
|
||||
- 'Python/bytecodes.c'
|
||||
- 'Python/optimizer*.c'
|
||||
- 'Python/executor_cases.c.h'
|
||||
- 'Python/optimizer_cases.c.h'
|
||||
- '!Python/perf_jit_trampoline.c'
|
||||
- '!**/*.md'
|
||||
- '!**/*.ini'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
|
|
@@ -19,11 +29,30 @@ concurrency:
|
|||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
|
||||
jobs:
|
||||
interpreter:
|
||||
name: Interpreter (Debug)
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 90
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Build tier two interpreter
|
||||
run: |
|
||||
./configure --enable-experimental-jit=interpreter --with-pydebug
|
||||
make all --jobs 4
|
||||
- name: Test tier two interpreter
|
||||
run: |
|
||||
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
jit:
|
||||
name: ${{ matrix.target }} (${{ matrix.debug && 'Debug' || 'Release' }})
|
||||
needs: interpreter
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 75
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
|
|
@@ -34,111 +63,123 @@ jobs:
|
|||
- x86_64-apple-darwin/clang
|
||||
- aarch64-apple-darwin/clang
|
||||
- x86_64-unknown-linux-gnu/gcc
|
||||
- x86_64-unknown-linux-gnu/clang
|
||||
- aarch64-unknown-linux-gnu/gcc
|
||||
- aarch64-unknown-linux-gnu/clang
|
||||
debug:
|
||||
- true
|
||||
- false
|
||||
llvm:
|
||||
- 16
|
||||
- 19
|
||||
include:
|
||||
- target: i686-pc-windows-msvc/msvc
|
||||
architecture: Win32
|
||||
runner: windows-latest
|
||||
compiler: msvc
|
||||
runner: windows-2022
|
||||
- target: x86_64-pc-windows-msvc/msvc
|
||||
architecture: x64
|
||||
runner: windows-latest
|
||||
compiler: msvc
|
||||
runner: windows-2022
|
||||
- target: aarch64-pc-windows-msvc/msvc
|
||||
architecture: ARM64
|
||||
runner: windows-latest
|
||||
compiler: msvc
|
||||
runner: windows-11-arm
|
||||
- target: x86_64-apple-darwin/clang
|
||||
architecture: x86_64
|
||||
runner: macos-13
|
||||
compiler: clang
|
||||
runner: macos-15-intel
|
||||
- target: aarch64-apple-darwin/clang
|
||||
architecture: aarch64
|
||||
runner: macos-14
|
||||
compiler: clang
|
||||
- target: x86_64-unknown-linux-gnu/gcc
|
||||
architecture: x86_64
|
||||
runner: ubuntu-latest
|
||||
compiler: gcc
|
||||
- target: x86_64-unknown-linux-gnu/clang
|
||||
architecture: x86_64
|
||||
runner: ubuntu-latest
|
||||
compiler: clang
|
||||
runner: ubuntu-24.04
|
||||
- target: aarch64-unknown-linux-gnu/gcc
|
||||
architecture: aarch64
|
||||
runner: ubuntu-latest
|
||||
compiler: gcc
|
||||
# These fail because of emulation, not because of the JIT:
|
||||
exclude: test_unix_events test_init test_process_pool test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os test_perf_profiler test_posix test_signal test_socket test_subprocess test_threading test_venv test_external_inspection
|
||||
- target: aarch64-unknown-linux-gnu/clang
|
||||
architecture: aarch64
|
||||
runner: ubuntu-latest
|
||||
compiler: clang
|
||||
# These fail because of emulation, not because of the JIT:
|
||||
exclude: test_unix_events test_init test_process_pool test_shutdown test_multiprocessing_fork test_cmd_line test_faulthandler test_os test_perf_profiler test_posix test_signal test_socket test_subprocess test_threading test_venv test_external_inspection
|
||||
env:
|
||||
CC: ${{ matrix.compiler }}
|
||||
runner: ubuntu-24.04-arm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Native Windows
|
||||
if: runner.os == 'Windows' && matrix.architecture != 'ARM64'
|
||||
# PCbuild downloads LLVM automatically:
|
||||
- name: Windows
|
||||
if: runner.os == 'Windows'
|
||||
run: |
|
||||
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
|
||||
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '--pgo' }} -p ${{ matrix.architecture }}
|
||||
./PCbuild/rt.bat ${{ matrix.debug && '-d' }} -p ${{ matrix.architecture }} -q --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
|
||||
# No PGO or tests (yet):
|
||||
- name: Emulated Windows
|
||||
if: runner.os == 'Windows' && matrix.architecture == 'ARM64'
|
||||
run: |
|
||||
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}
|
||||
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }}
|
||||
./PCbuild/rt.bat ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
|
||||
- name: Native macOS
|
||||
- name: macOS
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
brew update
|
||||
brew install llvm@${{ matrix.llvm }}
|
||||
SDKROOT="$(xcrun --show-sdk-path)" \
|
||||
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }}
|
||||
export SDKROOT="$(xcrun --show-sdk-path)"
|
||||
# Set MACOSX_DEPLOYMENT_TARGET and -Werror=unguarded-availability to
|
||||
# make sure we don't break downstream distributors (like uv):
|
||||
export CFLAGS_JIT='-Werror=unguarded-availability'
|
||||
export MACOSX_DEPLOYMENT_TARGET=10.15
|
||||
./configure --enable-experimental-jit --enable-universalsdk --with-universal-archs=universal2 ${{ matrix.debug && '--with-pydebug' || '' }}
|
||||
make all --jobs 4
|
||||
./python.exe -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
|
||||
- name: Native Linux
|
||||
if: runner.os == 'Linux' && matrix.architecture == 'x86_64'
|
||||
- name: Linux
|
||||
if: runner.os == 'Linux'
|
||||
run: |
|
||||
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
|
||||
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
|
||||
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }}
|
||||
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }}
|
||||
make all --jobs 4
|
||||
./python -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
|
||||
- name: Emulated Linux
|
||||
if: runner.os == 'Linux' && matrix.architecture != 'x86_64'
|
||||
jit-with-disabled-gil:
|
||||
name: Free-Threaded (Debug)
|
||||
needs: interpreter
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
llvm:
|
||||
- 19
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Build with JIT enabled and GIL disabled
|
||||
run: |
|
||||
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
|
||||
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
|
||||
./configure --prefix="$(pwd)/../build"
|
||||
make install --jobs 4
|
||||
make clean --jobs 4
|
||||
export HOST=${{ matrix.architecture }}-linux-gnu
|
||||
sudo apt install --yes "gcc-$HOST" qemu-user
|
||||
${{ !matrix.debug && matrix.compiler == 'clang' && './configure --enable-optimizations' || '' }}
|
||||
${{ !matrix.debug && matrix.compiler == 'clang' && 'make profile-run-stamp --jobs 4' || '' }}
|
||||
export QEMU_LD_PREFIX="/usr/$HOST"
|
||||
CC="${{ matrix.compiler == 'clang' && 'clang --target=$HOST' || '$HOST-gcc' }}" \
|
||||
CPP="$CC --preprocess" \
|
||||
HOSTRUNNER=qemu-${{ matrix.architecture }} \
|
||||
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes
|
||||
./configure --enable-experimental-jit --with-pydebug --disable-gil
|
||||
make all --jobs 4
|
||||
./python -m test --exclude ${{ matrix.exclude }} --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
- name: Run tests
|
||||
run: |
|
||||
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
continue-on-error: true
|
||||
|
||||
no-opt-jit:
|
||||
name: JIT without optimizations (Debug)
|
||||
needs: interpreter
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
llvm:
|
||||
- 19
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Build with JIT
|
||||
run: |
|
||||
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
|
||||
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
|
||||
./configure --enable-experimental-jit --with-pydebug
|
||||
make all --jobs 4
|
||||
- name: Run tests without optimizations
|
||||
run: |
|
||||
PYTHON_UOPS_OPTIMIZE=0 ./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
|
||||
|
|
|
|||
.github/workflows/lint.yml (vendored): 2 changes

@@ -20,6 +20,8 @@ jobs:

    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"

.github/workflows/mypy.yml (vendored): 33 changes

@@ -8,14 +8,28 @@ on:
|
|||
pull_request:
|
||||
paths:
|
||||
- ".github/workflows/mypy.yml"
|
||||
- "Lib/_colorize.py"
|
||||
- "Lib/_pyrepl/**"
|
||||
- "Lib/test/libregrtest/**"
|
||||
- "Lib/tomllib/**"
|
||||
- "Misc/mypy/**"
|
||||
- "Tools/build/check_extension_modules.py"
|
||||
- "Tools/build/check_warnings.py"
|
||||
- "Tools/build/compute-changes.py"
|
||||
- "Tools/build/consts_getter.py"
|
||||
- "Tools/build/deepfreeze.py"
|
||||
- "Tools/build/generate-build-details.py"
|
||||
- "Tools/build/generate_sbom.py"
|
||||
- "Tools/build/generate_stdlib_module_names.py"
|
||||
- "Tools/build/mypy.ini"
|
||||
- "Tools/build/umarshal.py"
|
||||
- "Tools/build/update_file.py"
|
||||
- "Tools/build/verify_ensurepip_wheels.py"
|
||||
- "Tools/cases_generator/**"
|
||||
- "Tools/clinic/**"
|
||||
- "Tools/jit/**"
|
||||
- "Tools/peg_generator/**"
|
||||
- "Tools/requirements-dev.txt"
|
||||
- "Tools/wasm/**"
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
|
|
@@ -32,26 +46,31 @@ concurrency:
|
|||
|
||||
jobs:
|
||||
mypy:
|
||||
name: Run mypy on ${{ matrix.target }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [
|
||||
"Lib/_pyrepl",
|
||||
"Lib/test/libregrtest",
|
||||
"Tools/build/",
|
||||
"Lib/tomllib",
|
||||
"Tools/build",
|
||||
"Tools/cases_generator",
|
||||
"Tools/clinic",
|
||||
"Tools/jit",
|
||||
"Tools/peg_generator",
|
||||
"Tools/wasm",
|
||||
]
|
||||
name: Run mypy on ${{ matrix.target }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
python-version: "3.13"
|
||||
cache: pip
|
||||
cache-dependency-path: Tools/requirements-dev.txt
|
||||
- run: pip install -r Tools/requirements-dev.txt
|
||||
- run: python3 Misc/mypy/make_symlinks.py --symlink
|
||||
- run: mypy --config-file ${{ matrix.target }}/mypy.ini
|
||||
|
|
|
|||
.github/workflows/posix-deps-apt.sh (vendored): 11 changes

@@ -1,12 +1,11 @@
#!/bin/sh
apt-get update

# autoconf-archive is needed by autoreconf (check_generated_files job)
apt-get -yq install \
    build-essential \
    pkg-config \
    autoconf-archive \
    ccache \
    cmake \
    gdb \
    lcov \
    libb2-dev \

@@ -19,6 +18,7 @@ apt-get -yq install \
    libreadline6-dev \
    libsqlite3-dev \
    libssl-dev \
    libzstd-dev \
    lzma \
    lzma-dev \
    strace \

@@ -26,3 +26,10 @@ apt-get -yq install \
    uuid-dev \
    xvfb \
    zlib1g-dev

# Workaround missing libmpdec-dev on ubuntu 24.04:
# https://launchpad.net/~ondrej/+archive/ubuntu/php
# https://deb.sury.org/
sudo add-apt-repository ppa:ondrej/php
apt-get update
apt-get -yq install libmpdec-dev

.github/workflows/project-updater.yml (vendored): 30 changes

@@ -1,30 +0,0 @@
name: Update GH projects

on:
  issues:
    types:
      - opened
      - labeled

permissions:
  contents: read

jobs:
  add-to-project:
    name: Add issues to projects
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      matrix:
        include:
          # if an issue has any of these labels, it will be added
          # to the corresponding project
          - { project: 2, label: "release-blocker, deferred-blocker" }
          - { project: 32, label: sprint }

    steps:
      - uses: actions/add-to-project@v1.0.0
        with:
          project-url: https://github.com/orgs/python/projects/${{ matrix.project }}
          github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
          labeled: ${{ matrix.label }}

54 .github/workflows/require-pr-label.yml vendored
@ -4,20 +4,58 @@ on:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]

permissions:
issues: write
pull-requests: write

jobs:
label:
name: DO-NOT-MERGE / unresolved review
label-dnm:
name: DO-NOT-MERGE
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
permissions:
pull-requests: read
timeout-minutes: 10

steps:
- uses: mheap/github-action-required-labels@v5
- name: Check there's no DO-NOT-MERGE
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 0
labels: "DO-NOT-MERGE, awaiting changes, awaiting change review"
labels: |
DO-NOT-MERGE

label-reviews:
name: Unresolved review
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
permissions:
pull-requests: read
timeout-minutes: 10

steps:
# Check that the PR is not awaiting changes from the author due to previous review.
- name: Check there's no required changes
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 0
labels: |
awaiting changes
awaiting change review
- id: is-feature
name: Check whether this PR is a feature (contains a "type-feature" label)
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
labels: |
type-feature
exit_type: success # don't fail the check if the PR is not a feature, just record the result
# In case of a feature PR, check for a complete review (contains an "awaiting merge" label).
- id: awaiting-merge
if: steps.is-feature.outputs.status == 'success'
name: Check for complete review
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
labels: |
awaiting merge
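For reference, the label conditions these jobs enforce can also be checked locally before pushing. A minimal sketch using the GitHub CLI, assuming `gh` is installed and authenticated, with `12345` as a placeholder PR number:

```sh
# List the labels currently applied to a pull request (placeholder PR number).
gh pr view 12345 --repo python/cpython --json labels --jq '.labels[].name'
```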
107 .github/workflows/reusable-context.yml vendored Normal file
@ -0,0 +1,107 @@
name: Reusable build context

on: # yamllint disable-line rule:truthy
workflow_call:
outputs:
# Every referenced step MUST always set its output variable,
# either via ``Tools/build/compute-changes.py`` or in this workflow file.
# Boolean outputs (generally prefixed ``run-``) can then later be used
# safely through the following idiom in job conditionals and other
# expressions. Here's some examples:
#
# if: fromJSON(needs.build-context.outputs.run-tests)
#
# ${{
# fromJSON(needs.build-context.outputs.run-tests)
# && 'truthy-branch'
# || 'falsy-branch'
# }}
#
config-hash:
description: Config hash value for use in cache keys
value: ${{ jobs.compute-changes.outputs.config-hash }} # str
run-docs:
description: Whether to build the docs
value: ${{ jobs.compute-changes.outputs.run-docs }} # bool
run-tests:
description: Whether to run the regular tests
value: ${{ jobs.compute-changes.outputs.run-tests }} # bool
run-windows-tests:
description: Whether to run the Windows tests
value: ${{ jobs.compute-changes.outputs.run-windows-tests }} # bool
run-windows-msi:
description: Whether to run the MSI installer smoke tests
value: ${{ jobs.compute-changes.outputs.run-windows-msi }} # bool
run-ci-fuzz:
description: Whether to run the CIFuzz job
value: ${{ jobs.compute-changes.outputs.run-ci-fuzz }} # bool

jobs:
compute-changes:
name: Create context from changed files
runs-on: ubuntu-latest
timeout-minutes: 10
outputs:
config-hash: ${{ steps.config-hash.outputs.hash }}
run-ci-fuzz: ${{ steps.changes.outputs.run-ci-fuzz }}
run-docs: ${{ steps.changes.outputs.run-docs }}
run-tests: ${{ steps.changes.outputs.run-tests }}
run-windows-msi: ${{ steps.changes.outputs.run-windows-msi }}
run-windows-tests: ${{ steps.changes.outputs.run-windows-tests }}
steps:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3"

- run: >-
echo '${{ github.event_name }}'

- uses: actions/checkout@v4
with:
persist-credentials: false
ref: >-
${{
github.event_name == 'pull_request'
&& github.event.pull_request.head.sha
|| ''
}}

# Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721
- name: Fetch commits to get branch diff
if: github.event_name == 'pull_request'
run: |
set -eux

# Fetch enough history to find a common ancestor commit (aka merge-base):
git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \
--no-tags --prune --no-recurse-submodules

# This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from):
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" )
DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" )

# Get all commits since that commit date from the base branch (eg: main):
git fetch origin "${refspec_base}" --shallow-since="${DATE}" \
--no-tags --prune --no-recurse-submodules
env:
branch_pr: 'origin/${{ github.event.pull_request.head.ref }}'
commits: ${{ github.event.pull_request.commits }}
refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}'
refspec_pr: '+${{ github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}'

# We only want to run tests on PRs when related files are changed,
# or when someone triggers a manual workflow run.
- name: Compute changed files
id: changes
run: python Tools/build/compute-changes.py
env:
GITHUB_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
GITHUB_EVENT_NAME: ${{ github.event_name }}
CCF_TARGET_REF: ${{ github.base_ref || github.event.repository.default_branch }}
CCF_HEAD_REF: ${{ github.event.pull_request.head.sha || github.sha }}

- name: Compute hash for config cache key
id: config-hash
run: |
echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> "$GITHUB_OUTPUT"
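The "Fetch commits to get branch diff" step above avoids a full clone by fetching just enough history on both sides to contain a merge-base. A rough local equivalent of the same idea, with `my-branch` and the commit count used only as placeholders:

```sh
# Fetch the PR branch only as deep as its own commit count (placeholder values).
commits=3
git fetch origin "+refs/heads/my-branch:refs/remotes/origin/my-branch" \
    --depth=$(( commits + 1 )) --no-tags --prune

# Oldest commit in that shallow history, and its commit date.
ancestor=$(git rev-list --first-parent --max-parents=0 --max-count=1 origin/my-branch)
date=$(git log --date=iso8601 --format=%cd "$ancestor")

# Deepen the base branch back to that date so a merge-base can be found.
git fetch origin main --shallow-since="$date" --no-tags --prune
git merge-base origin/main origin/my-branch
```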
83 .github/workflows/reusable-docs.yml vendored
@ -1,4 +1,4 @@
name: Docs
name: Reusable Docs

on:
workflow_call:
@ -11,34 +11,45 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

env:
FORCE_COLOR: 1

jobs:
build_doc:
build-doc:
name: 'Docs'
runs-on: ubuntu-latest
timeout-minutes: 60
env:
branch_base: 'origin/${{ github.event.pull_request.base.ref }}'
branch_pr: 'origin/${{ github.event.pull_request.head.ref }}'
commits: ${{ github.event.pull_request.commits }}
refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}'
refspec_pr: '+${{ github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}'
steps:
- name: 'Check out latest PR branch commit'
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
persist-credentials: false
ref: >-
${{
github.event_name == 'pull_request'
&& github.event.pull_request.head.sha
|| ''
}}
# Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721
- name: 'Fetch commits to get branch diff'
if: github.event_name == 'pull_request'
run: |
# Fetch enough history to find a common ancestor commit (aka merge-base):
git fetch origin ${{ env.refspec_pr }} --depth=$(( ${{ github.event.pull_request.commits }} + 1 )) \
git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \
--no-tags --prune --no-recurse-submodules

# This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from):
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 ${{ env.branch_pr }} )
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" )
DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" )

# Get all commits since that commit date from the base branch (eg: master or main):
git fetch origin ${{ env.refspec_base }} --shallow-since="${DATE}" \
git fetch origin "${refspec_base}" --shallow-since="${DATE}" \
--no-tags --prune --no-recurse-submodules
- name: 'Set up Python'
uses: actions/setup-python@v5
@ -54,41 +65,26 @@ jobs:
continue-on-error: true
run: |
set -Eeuo pipefail
# Build docs with the '-n' (nit-picky) option; write warnings to file
make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html
# Build docs with the nit-picky option; write warnings to file
make -C Doc/ PYTHON=../python SPHINXOPTS="--quiet --nitpicky --warning-file sphinx-warnings.txt" html
- name: 'Check warnings'
if: github.event_name == 'pull_request'
run: |
python Doc/tools/check-warnings.py \
--annotate-diff '${{ env.branch_base }}' '${{ env.branch_pr }}' \
--annotate-diff "${branch_base}" "${branch_pr}" \
--fail-if-regression \
--fail-if-improved

# This build doesn't use problem matchers or check annotations
build_doc_oldest_supported_sphinx:
name: 'Docs (Oldest Sphinx)'
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
python-version: '3.11' # known to work with Sphinx 4.2
cache: 'pip'
cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt'
- name: 'Install build dependencies'
run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt"
- name: 'Build HTML documentation'
run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html
--fail-if-improved \
--fail-if-new-news-nit

# Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release
doctest:
name: 'Doctest'
runs-on: ubuntu-latest
runs-on: ubuntu-24.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/cache@v4
with:
path: ~/.cache/pip
@ -105,4 +101,31 @@ jobs:
run: make -C Doc/ PYTHON=../python venv
# Use "xvfb-run" since some doctest tests open GUI windows
- name: 'Run documentation doctest'
run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="-W --keep-going" doctest
run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="--fail-on-warning" doctest

check-epub:
name: 'Check EPUB'
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
python-version: '3'
cache: 'pip'
cache-dependency-path: 'Doc/requirements.txt'
- name: 'Install build dependencies'
run: |
make -C Doc/ venv
python -m pip install epubcheck
- name: 'Build EPUB documentation'
run: make -C Doc/ PYTHON=../python epub
- name: 'Run epubcheck'
continue-on-error: true
run: epubcheck Doc/build/epub/Python.epub &> Doc/epubcheck.txt
- run: cat Doc/epubcheck.txt
- name: 'Check for fatal errors in EPUB'
run: python Doc/tools/check-epub.py
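The same nit-picky Sphinx build that the Docs job performs can be reproduced locally from a CPython checkout. A sketch of the commands the workflow invokes, assuming a built `./python` is available in the repository root:

```sh
# Create the docs virtual environment, then build HTML with nit-picky warnings
# collected into a file, mirroring the CI step above.
make -C Doc/ PYTHON=../python venv
make -C Doc/ PYTHON=../python \
    SPHINXOPTS="--quiet --nitpicky --warning-file sphinx-warnings.txt" html
```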
53 .github/workflows/reusable-macos.yml vendored
@ -1,3 +1,5 @@
name: Reusable macOS

on:
workflow_call:
inputs:
@ -8,13 +10,18 @@ on:
required: false
type: boolean
default: false
os-matrix:
required: false
os:
description: OS to run the job
required: true
type: string

env:
FORCE_COLOR: 1

jobs:
build_macos:
name: 'build and test'
build-macos:
name: build and test (${{ inputs.os }})
runs-on: ${{ inputs.os }}
timeout-minutes: 60
env:
HOMEBREW_NO_ANALYTICS: 1
@ -22,35 +29,53 @@ jobs:
HOMEBREW_NO_INSTALL_CLEANUP: 1
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
PYTHONSTRICTEXTENSIONBUILD: 1
strategy:
fail-fast: false
matrix:
os: ${{fromJson(inputs.os-matrix)}}
runs-on: ${{ matrix.os }}
TERM: linux
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ matrix.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.config_hash }}
- name: Install Homebrew dependencies
run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk
run: |
brew install pkg-config openssl@3.0 xz gdbm tcl-tk@9 make
# Because alternate versions are not symlinked into place by default:
brew link --overwrite tcl-tk@9
- name: Configure CPython
run: |
MACOSX_DEPLOYMENT_TARGET=10.15 \
GDBM_CFLAGS="-I$(brew --prefix gdbm)/include" \
GDBM_LIBS="-L$(brew --prefix gdbm)/lib -lgdbm" \
./configure \
--config-cache \
--with-pydebug \
--enable-slower-safety \
--enable-safety \
${{ inputs.free-threading && '--disable-gil' || '' }} \
--prefix=/opt/python-dev \
--with-openssl="$(brew --prefix openssl@3.0)"
- name: Build CPython
run: make -j4
if : ${{ inputs.free-threading || inputs.os != 'macos-15-intel' }}
run: gmake -j8
- name: Build CPython for compiler warning check
if : ${{ !inputs.free-threading && inputs.os == 'macos-15-intel' }}
run: set -o pipefail; gmake -j8 --output-sync 2>&1 | tee compiler_output_macos.txt
- name: Display build info
run: make pythoninfo
- name: Check compiler warnings
if : ${{ !inputs.free-threading && inputs.os == 'macos-15-intel' }}
run: >-
python3 Tools/build/check_warnings.py
--compiler-output-file-path=compiler_output_macos.txt
--warning-ignore-file-path=Tools/build/.warningignore_macos
--compiler-output-type=clang
--fail-on-regression
--fail-on-improvement
--path-prefix="./"
- name: Tests
run: make test
run: make ci
124 .github/workflows/reusable-san.yml vendored Normal file
@ -0,0 +1,124 @@
name: Reusable Sanitizer

on:
workflow_call:
inputs:
sanitizer:
required: true
type: string
config_hash:
required: true
type: string
free-threading:
description: Whether to use free-threaded mode
required: false
type: boolean
default: false

env:
FORCE_COLOR: 1

jobs:
build-san-reusable:
name: >-
${{ inputs.sanitizer }}${{
inputs.free-threading
&& ' (free-threading)'
|| ''
}}
runs-on: ubuntu-24.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.sanitizer }}-${{ inputs.config_hash }}
- name: Install dependencies
run: |
sudo ./.github/workflows/posix-deps-apt.sh
# Install clang
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh

if [ "${SANITIZER}" = "TSan" ]; then
sudo ./llvm.sh 17 # gh-121946: llvm-18 package is temporarily broken
sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 100
sudo update-alternatives --set clang /usr/bin/clang-17
sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-17 100
sudo update-alternatives --set clang++ /usr/bin/clang++-17
# Reduce ASLR to avoid TSan crashing
sudo sysctl -w vm.mmap_rnd_bits=28
else
sudo ./llvm.sh 20
fi

- name: Sanitizer option setup
run: |
if [ "${SANITIZER}" = "TSan" ]; then
echo "TSAN_OPTIONS=${SAN_LOG_OPTION} suppressions=${GITHUB_WORKSPACE}/Tools/tsan/suppressions${{
fromJSON(inputs.free-threading)
&& '_free_threading'
|| ''
}}.txt handle_segv=0" >> "$GITHUB_ENV"
else
echo "UBSAN_OPTIONS=${SAN_LOG_OPTION}" >> "$GITHUB_ENV"
fi
echo "CC=clang" >> "$GITHUB_ENV"
echo "CXX=clang++" >> "$GITHUB_ENV"
env:
SANITIZER: ${{ inputs.sanitizer }}
SAN_LOG_OPTION: log_path=${{ github.workspace }}/san_log
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: >-
./configure
--config-cache
${{
inputs.sanitizer == 'TSan'
&& '--with-thread-sanitizer'
|| '--with-undefined-behavior-sanitizer'
}}
--with-pydebug
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
run: >-
./python -m test
${{ inputs.sanitizer == 'TSan' && '--tsan' || '' }}
-j4
- name: Parallel tests
if: >-
inputs.sanitizer == 'TSan'
&& fromJSON(inputs.free-threading)
run: ./python -m test --tsan-parallel --parallel-threads=4 -j4
- name: Display logs
if: always()
run: find "${GITHUB_WORKSPACE}" -name 'san_log.*' | xargs head -n 1000
- name: Archive logs
if: always()
uses: actions/upload-artifact@v4
with:
name: >-
${{ inputs.sanitizer }}-logs-${{
fromJSON(inputs.free-threading)
&& 'free-threading'
|| 'default'
}}
path: san_log.*
if-no-files-found: ignore
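Outside CI, the sanitizer configuration above boils down to a clang build with the matching `configure` switch and the sanitizer's options exported. A hedged local sketch for the TSan case, using the suppression file referenced in the workflow:

```sh
# Thread sanitizer build of CPython with the repository's suppression file.
export CC=clang CXX=clang++
export TSAN_OPTIONS="suppressions=$PWD/Tools/tsan/suppressions.txt handle_segv=0"
./configure --config-cache --with-pydebug --with-thread-sanitizer
make -j4
./python -m test --tsan -j4
```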
55 .github/workflows/reusable-tsan.yml vendored
@ -1,55 +0,0 @@
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
options:
required: true
type: string
suppressions_path:
description: 'A repo relative path to the suppressions file'
required: true
type: string

jobs:
build_tsan_reusable:
name: 'Thread sanitizer'
runs-on: ubuntu-22.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
- name: Install Dependencies
run: |
sudo ./.github/workflows/posix-deps-apt.sh
sudo apt install -y clang
# Reduce ASLR to avoid TSAN crashing
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN Option Setup
run: |
echo "TSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }}" >> $GITHUB_ENV
echo "CC=clang" >> $GITHUB_ENV
echo "CXX=clang++" >> $GITHUB_ENV
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: ${{ inputs.options }}
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
run: ./python -m test --tsan -j4
97 .github/workflows/reusable-ubuntu.yml vendored
@ -1,44 +1,69 @@
name: Reusable Ubuntu

on:
workflow_call:
inputs:
config_hash:
required: true
type: string
options:
required: true
type: string
bolt-optimizations:
description: Whether to enable BOLT optimizations
required: false
type: boolean
default: false
free-threading:
description: Whether to use free-threaded mode
required: false
type: boolean
default: false
os:
description: OS to run the job
required: true
type: string

env:
FORCE_COLOR: 1

jobs:
build_ubuntu_reusable:
name: 'build and test'
build-ubuntu-reusable:
name: build and test (${{ inputs.os }})
runs-on: ${{ inputs.os }}
timeout-minutes: 60
runs-on: ubuntu-20.04
env:
OPENSSL_VER: 3.0.13
OPENSSL_VER: 3.0.18
PYTHONSTRICTEXTENSIONBUILD: 1
TERM: linux
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Install Clang and BOLT
if: ${{ fromJSON(inputs.bolt-optimizations) }}
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh 19
sudo apt-get install bolt-19
echo PATH="$(llvm-config-19 --bindir):$PATH" >> $GITHUB_ENV
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV"
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> "$GITHUB_ENV"
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> "$GITHUB_ENV"
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
key: ${{ inputs.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory "$MULTISSL_DIR" --openssl "$OPENSSL_VER" --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
@ -46,31 +71,57 @@ jobs:
max-size: "200M"
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
echo "CPYTHON_RO_SRCDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-ro-srcdir)" >> "$GITHUB_ENV"
echo "CPYTHON_BUILDDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-builddir)" >> "$GITHUB_ENV"
- name: Create directories for read-only out-of-tree builds
run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
run: mkdir -p "$CPYTHON_RO_SRCDIR" "$CPYTHON_BUILDDIR"
- name: Bind mount sources read-only
run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
run: sudo mount --bind -o ro "$GITHUB_WORKSPACE" "$CPYTHON_RO_SRCDIR"
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.config_hash }}
- name: Configure CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: ${{ inputs.options }}
# `test_unpickle_module_race` writes to the source directory, which is
# read-only during builds — so we exclude it from profiling with BOLT.
run: >-
PROFILE_TASK='-m test --pgo --ignore test_unpickle_module_race'
../cpython-ro-srcdir/configure
--config-cache
--with-pydebug
--enable-slower-safety
--enable-safety
--with-openssl="$OPENSSL_DIR"
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
${{ fromJSON(inputs.bolt-optimizations) && '--enable-bolt' || '' }}
- name: Build CPython out-of-tree
if: ${{ inputs.free-threading }}
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make -j4
run: make -j
- name: Build CPython out-of-tree (for compiler warning check)
if: ${{ !inputs.free-threading }}
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: set -o pipefail; make -j --output-sync 2>&1 | tee compiler_output_ubuntu.txt
- name: Display build info
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make pythoninfo
- name: Check compiler warnings
if: ${{ !inputs.free-threading }}
run: >-
python Tools/build/check_warnings.py
--compiler-output-file-path="${CPYTHON_BUILDDIR}/compiler_output_ubuntu.txt"
--warning-ignore-file-path "${GITHUB_WORKSPACE}/Tools/build/.warningignore_ubuntu"
--compiler-output-type=gcc
--fail-on-regression
--fail-on-improvement
--path-prefix="../cpython-ro-srcdir/"
- name: Remount sources writable for tests
# some tests write to srcdir, lack of pyc files slows down testing
run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw
run: sudo mount "$CPYTHON_RO_SRCDIR" -oremount,rw
- name: Tests
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: xvfb-run make test
run: xvfb-run make ci
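The read-only out-of-tree arrangement above can be approximated locally without the bind mount by simply configuring from a separate build directory. A minimal sketch, with the directory names chosen only for illustration and the source checkout assumed to live in `../cpython`:

```sh
# Out-of-tree build: keep the source tree untouched and build elsewhere.
mkdir -p ../cpython-builddir
cd ../cpython-builddir
../cpython/configure --config-cache --with-pydebug
make -j
make pythoninfo
```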
47 .github/workflows/reusable-wasi.yml vendored
@ -1,3 +1,5 @@
name: Reusable WASI

on:
workflow_call:
inputs:
@ -5,53 +7,62 @@ on:
required: true
type: string

env:
FORCE_COLOR: 1

jobs:
build_wasi_reusable:
build-wasi-reusable:
name: 'build and test'
runs-on: ubuntu-24.04
timeout-minutes: 60
runs-on: ubuntu-20.04
env:
WASMTIME_VERSION: 18.0.3
WASI_SDK_VERSION: 21
WASMTIME_VERSION: 22.0.0
WASI_SDK_VERSION: 24
WASI_SDK_PATH: /opt/wasi-sdk
CROSS_BUILD_PYTHON: cross-build/build
CROSS_BUILD_WASI: cross-build/wasm32-wasi
CROSS_BUILD_WASI: cross-build/wasm32-wasip1
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
# No problem resolver registered as one doesn't currently exist for Clang.
- name: "Install wasmtime"
uses: jcbhmr/setup-wasmtime@v2
uses: bytecodealliance/actions/wasmtime/setup@v1
with:
wasmtime-version: ${{ env.WASMTIME_VERSION }}
version: ${{ env.WASMTIME_VERSION }}
- name: "Restore WASI SDK"
id: cache-wasi-sdk
uses: actions/cache@v4
with:
path: ${{ env.WASI_SDK_PATH }}
key: ${{ runner.os }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}
- name: "Install WASI SDK"
- name: "Install WASI SDK" # Hard-coded to x64.
if: steps.cache-wasi-sdk.outputs.cache-hit != 'true'
run: |
mkdir ${{ env.WASI_SDK_PATH }} && \
curl -s -S --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${{ env.WASI_SDK_VERSION }}/wasi-sdk-${{ env.WASI_SDK_VERSION }}.0-linux.tar.gz | \
tar --strip-components 1 --directory ${{ env.WASI_SDK_PATH }} --extract --gunzip
mkdir "${WASI_SDK_PATH}" && \
curl -s -S --location "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-x86_64-linux.tar.gz" | \
tar --strip-components 1 --directory "${WASI_SDK_PATH}" --extract --gunzip
- name: "Configure ccache action"
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: "Add ccache to PATH"
run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
run: echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: "Install Python"
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: "Runner image version"
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: "Restore Python build config.cache"
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }}
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python.
# Include the hash of `Tools/wasm/wasi.py` as it may change the environment variables.
# (Make sure to keep the key in sync with the other config.cache step below.)
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }}
- name: "Configure build Python"
run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug
- name: "Make build Python"
@ -60,14 +71,14 @@ jobs:
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_WASI }}/config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}-${{ inputs.config_hash }}-${{ env.pythonLocation }}
# Should be kept in sync with the other config.cache step above.
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }}
- name: "Configure host"
# `--with-pydebug` inferred from configure-build-python
run: python3 Tools/wasm/wasi.py configure-host -- --config-cache
- name: "Make host"
run: python3 Tools/wasm/wasi.py make-host
- name: "Display build info"
run: make --directory ${{ env.CROSS_BUILD_WASI }} pythoninfo
run: make --directory "${CROSS_BUILD_WASI}" pythoninfo
- name: "Test"
run: make --directory ${{ env.CROSS_BUILD_WASI }} test
run: make --directory "${CROSS_BUILD_WASI}" test
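Putting the WASI steps above together, a local cross-build follows the same `Tools/wasm/wasi.py` subcommands shown in the workflow. A hedged sketch, assuming the WASI SDK is installed at `/opt/wasi-sdk` and `wasmtime` is on `PATH`:

```sh
# Build the native "build" Python first, then cross-compile and test the WASI host.
export WASI_SDK_PATH=/opt/wasi-sdk
python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug
python3 Tools/wasm/wasi.py make-build-python
python3 Tools/wasm/wasi.py configure-host -- --config-cache
python3 Tools/wasm/wasi.py make-host
```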
31 .github/workflows/reusable-windows-msi.yml vendored Normal file
@ -0,0 +1,31 @@
name: Reusable Windows MSI

on:
workflow_call:
inputs:
arch:
description: CPU architecture
required: true
type: string

permissions:
contents: read

env:
FORCE_COLOR: 1

jobs:
build:
name: installer for ${{ inputs.arch }}
runs-on: ${{ inputs.arch == 'arm64' && 'windows-11-arm' || 'windows-2022' }}
timeout-minutes: 60
env:
ARCH: ${{ inputs.arch }}
IncludeFreethreaded: true
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build CPython installer
run: ./Tools/msi/build.bat --doc -"${ARCH}"
shell: bash
67 .github/workflows/reusable-windows.yml vendored
@ -1,53 +1,50 @@
name: Reusable Windows

on:
workflow_call:
inputs:
arch:
description: CPU architecture
required: true
type: string
free-threading:
description: Whether to compile CPython in free-threading mode
required: false
type: boolean
default: false

env:
FORCE_COLOR: 1
IncludeUwp: >-
true

jobs:
build_win32:
name: 'build and test (x86)'
runs-on: windows-latest
build:
name: Build and test (${{ inputs.arch }})
runs-on: ${{ inputs.arch == 'arm64' && 'windows-11-arm' || 'windows-2022' }}
timeout-minutes: 60
env:
IncludeUwp: 'true'
steps:
- uses: actions/checkout@v4
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p Win32 ${{ inputs.free-threading && '--disable-gil' || '' }}
- name: Display build info
run: .\python.bat -m test.pythoninfo
- name: Tests
run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }}

build_win_amd64:
name: 'build and test (x64)'
runs-on: windows-latest
timeout-minutes: 60
env:
IncludeUwp: 'true'
ARCH: ${{ inputs.arch }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Register MSVC problem matcher
if: inputs.arch != 'Win32'
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p x64 ${{ inputs.free-threading && '--disable-gil' || '' }}
run: >-
.\\PCbuild\\build.bat
-e -d -v
-p "${ARCH}"
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
shell: bash
- name: Display build info
run: .\python.bat -m test.pythoninfo
run: .\\python.bat -m test.pythoninfo
- name: Tests
run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }}

build_win_arm64:
name: 'build (arm64)'
runs-on: windows-latest
timeout-minutes: 60
env:
IncludeUwp: 'true'
steps:
- uses: actions/checkout@v4
- name: Register MSVC problem matcher
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p arm64 ${{ inputs.free-threading && '--disable-gil' || '' }}
run: >-
.\\PCbuild\\rt.bat
-p "${ARCH}"
-d -q --fast-ci
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
shell: bash
|
|||
6
.github/workflows/stale.yml
vendored
6
.github/workflows/stale.yml
vendored
|
|
@ -4,14 +4,12 @@ on:
|
|||
schedule:
|
||||
- cron: "0 */6 * * *"
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
if: github.repository_owner == 'python'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
|
|
|
132 .github/workflows/tail-call.yml vendored Normal file
@ -0,0 +1,132 @@
name: Tail calling interpreter
on:
pull_request:
paths:
- '.github/workflows/tail-call.yml'
- 'Python/bytecodes.c'
- 'Python/ceval.c'
- 'Python/ceval_macros.h'
- 'Python/generated_cases.c.h'
push:
paths:
- '.github/workflows/tail-call.yml'
- 'Python/bytecodes.c'
- 'Python/ceval.c'
- 'Python/ceval_macros.h'
- 'Python/generated_cases.c.h'
workflow_dispatch:

permissions:
contents: read

concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

env:
FORCE_COLOR: 1

jobs:
tail-call:
name: ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
target:
# Un-comment as we add support for more platforms for tail-calling interpreters.
# - i686-pc-windows-msvc/msvc
- x86_64-pc-windows-msvc/msvc
# - aarch64-pc-windows-msvc/msvc
- x86_64-apple-darwin/clang
- aarch64-apple-darwin/clang
- x86_64-unknown-linux-gnu/gcc
- aarch64-unknown-linux-gnu/gcc
- free-threading
llvm:
- 20
include:
# - target: i686-pc-windows-msvc/msvc
#   architecture: Win32
#   runner: windows-2022
- target: x86_64-pc-windows-msvc/msvc
architecture: x64
runner: windows-2022
# - target: aarch64-pc-windows-msvc/msvc
#   architecture: ARM64
#   runner: windows-2022
- target: x86_64-apple-darwin/clang
architecture: x86_64
runner: macos-15-intel
- target: aarch64-apple-darwin/clang
architecture: aarch64
runner: macos-14
- target: x86_64-unknown-linux-gnu/gcc
architecture: x86_64
runner: ubuntu-24.04
- target: aarch64-unknown-linux-gnu/gcc
architecture: aarch64
runner: ubuntu-24.04-arm
- target: free-threading
architecture: x86_64
runner: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'

- name: Native Windows (debug)
if: runner.os == 'Windows' && matrix.architecture != 'ARM64'
shell: cmd
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
set PlatformToolset=clangcl
set LLVMToolsVersion=${{ matrix.llvm }}.1.0
set LLVMInstallDir=C:\Program Files\LLVM
call ./PCbuild/build.bat --tail-call-interp -d -p ${{ matrix.architecture }}
call ./PCbuild/rt.bat -d -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3

# No tests (yet):
- name: Emulated Windows (release)
if: runner.os == 'Windows' && matrix.architecture == 'ARM64'
shell: cmd
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
set PlatformToolset=clangcl
set LLVMToolsVersion=${{ matrix.llvm }}.1.0
set LLVMInstallDir=C:\Program Files\LLVM
./PCbuild/build.bat --tail-call-interp -p ${{ matrix.architecture }}

- name: Native macOS (release)
if: runner.os == 'macOS'
run: |
brew update
brew install llvm@${{ matrix.llvm }}
export SDKROOT="$(xcrun --show-sdk-path)"
export PATH="/usr/local/opt/llvm@${{ matrix.llvm }}/bin:$PATH"
export PATH="/opt/homebrew/opt/llvm@${{ matrix.llvm }}/bin:$PATH"
CC=clang-20 ./configure --with-tail-call-interp
make all --jobs 4
./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3

- name: Native Linux (debug)
if: runner.os == 'Linux' && matrix.target != 'free-threading'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
CC=clang-20 ./configure --with-tail-call-interp --with-pydebug
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3

- name: Native Linux with free-threading (release)
if: matrix.target == 'free-threading'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
CC=clang-20 ./configure --with-tail-call-interp --disable-gil
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3

@ -26,6 +26,8 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3'
10 .github/zizmor.yml vendored Normal file
@ -0,0 +1,10 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
rules:
dangerous-triggers:
ignore:
- documentation-links.yml
unpinned-uses:
config:
policies:
"*": ref-pin
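Since zizmor is wired in through pre-commit (see the pre-commit configuration later in this diff), the same analysis can be run locally. A short sketch, assuming pre-commit is installed in the active environment:

```sh
# Run only the zizmor hook against the repository; it should pick up
# the .github/zizmor.yml configuration shown above.
pre-commit run zizmor --all-files
```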
29 .gitignore vendored
@ -38,6 +38,7 @@ tags
TAGS
.vs/
.vscode/
.cache/
gmon.out
.coverage
.mypy_cache/
@ -70,16 +71,15 @@ Lib/test/data/*
/Makefile
/Makefile.pre
/iOSTestbed.*
iOS/Frameworks/
iOS/Resources/Info.plist
iOS/testbed/build
iOS/testbed/Python.xcframework/ios-*/bin
iOS/testbed/Python.xcframework/ios-*/include
iOS/testbed/Python.xcframework/ios-*/lib
iOS/testbed/Python.xcframework/ios-*/Python.framework
iOS/testbed/iOSTestbed.xcodeproj/project.xcworkspace
iOS/testbed/iOSTestbed.xcodeproj/xcuserdata
iOS/testbed/iOSTestbed.xcodeproj/xcshareddata
Apple/iOS/Frameworks/
Apple/iOS/Resources/Info.plist
Apple/testbed/build
Apple/testbed/Python.xcframework/*/bin
Apple/testbed/Python.xcframework/*/include
Apple/testbed/Python.xcframework/*/lib
Apple/testbed/Python.xcframework/*/Python.framework
Apple/testbed/*Testbed.xcodeproj/project.xcworkspace
Apple/testbed/*Testbed.xcodeproj/xcuserdata
Mac/Makefile
Mac/PythonLauncher/Info.plist
Mac/PythonLauncher/Makefile
@ -130,6 +130,7 @@ Tools/unicode/data/
/autom4te.cache
/build/
/builddir/
/compile_commands.json
/config.cache
/config.log
/config.status
@ -137,11 +138,12 @@ Tools/unicode/data/
# hendrikmuhs/ccache-action@v1
/.ccache
/cross-build/
/jit_stencils.h
/jit_stencils*.h
/platform
/profile-clean-stamp
/profile-run-stamp
/profile-bolt-stamp
/profile-gen-stamp
/pybuilddir.txt
/pyconfig.h
/python-config
@ -169,5 +171,10 @@ Python/frozen_modules/MANIFEST
/python
!/Python/

# People's custom https://docs.anthropic.com/en/docs/claude-code/memory configs.
/.claude/
CLAUDE.local.md

#### main branch only stuff below this line, things to backport go above. ####
# main branch only: ABI files are not checked/maintained.
Doc/data/python*.abi
1 .mailmap
@ -1,3 +1,4 @@
# This file sets the canonical name for contributors to the repository.
# Documentation: https://git-scm.com/docs/gitmailmap
Willow Chargin <wchargin@gmail.com>
Amethyst Reese <amethyst@n7.gg> <john@noswap.com>
@ -1,18 +1,63 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.4
rev: v0.13.2
hooks:
- id: ruff
name: Run Ruff on Lib/test/
- id: ruff-check
name: Run Ruff (lint) on Doc/
args: [--exit-non-zero-on-fix]
files: ^Doc/
- id: ruff-check
name: Run Ruff (lint) on Lib/test/
args: [--exit-non-zero-on-fix]
files: ^Lib/test/
- id: ruff
name: Run Ruff on Argument Clinic
- id: ruff-check
name: Run Ruff (lint) on Tools/build/
args: [--exit-non-zero-on-fix, --config=Tools/build/.ruff.toml]
files: ^Tools/build/
- id: ruff-check
name: Run Ruff (lint) on Tools/i18n/
args: [--exit-non-zero-on-fix, --config=Tools/i18n/.ruff.toml]
files: ^Tools/i18n/
- id: ruff-check
name: Run Ruff (lint) on Argument Clinic
args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml]
files: ^Tools/clinic/|Lib/test/test_clinic.py
- id: ruff-check
name: Run Ruff (lint) on Tools/peg_generator/
args: [--exit-non-zero-on-fix, --config=Tools/peg_generator/.ruff.toml]
files: ^Tools/peg_generator/
- id: ruff-check
name: Run Ruff (lint) on Tools/wasm/
args: [--exit-non-zero-on-fix, --config=Tools/wasm/.ruff.toml]
files: ^Tools/wasm/
- id: ruff-format
name: Run Ruff (format) on Doc/
args: [--check]
files: ^Doc/
- id: ruff-format
name: Run Ruff (format) on Tools/build/check_warnings.py
args: [--check, --config=Tools/build/.ruff.toml]
files: ^Tools/build/check_warnings.py
- id: ruff-format
name: Run Ruff (format) on Tools/wasm/
args: [--check, --config=Tools/wasm/.ruff.toml]
files: ^Tools/wasm/

- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.9.0
hooks:
- id: black
name: Run Black on Tools/jit/
files: ^Tools/jit/

- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.5.5
hooks:
- id: remove-tabs
types: [python]

- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v6.0.0
hooks:
- id: check-case-conflict
- id: check-merge-conflict
@ -20,18 +65,55 @@ repos:
exclude: ^Lib/test/test_tomllib/
- id: check-yaml
- id: end-of-file-fixer
types: [python]
types_or: [python, yaml]
exclude: Lib/test/tokenizedata/coding20731.py
- id: end-of-file-fixer
files: '^\.github/CODEOWNERS$'
- id: trailing-whitespace
types_or: [c, inc, python, rst]
types_or: [c, inc, python, rst, yaml]
- id: trailing-whitespace
files: '^\.github/CODEOWNERS|\.(gram)$'

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.34.0
hooks:
- id: check-dependabot
- id: check-github-workflows
- id: check-readthedocs

- repo: https://github.com/rhysd/actionlint
rev: v1.7.7
hooks:
- id: actionlint

- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.14.1
hooks:
- id: zizmor

- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v0.9.1
rev: v1.0.0
hooks:
- id: sphinx-lint
args: [--enable=default-role]
files: ^Doc/|^Misc/NEWS.d/

- repo: local
hooks:
- id: blurb-no-space-c-api
name: Check C API news entries
language: fail
entry: Space found in path, move to Misc/NEWS.d/next/C_API/
files: Misc/NEWS.d/next/C API/20.*.rst

- repo: local
hooks:
- id: blurb-no-space-core-and-builtins
name: Check Core and Builtins news entries
language: fail
entry: Space found in path, move to Misc/NEWS.d/next/Core_and_Builtins/
files: Misc/NEWS.d/next/Core and Builtins/20.*.rst

- repo: meta
hooks:
- id: check-hooks-apply
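All of the hooks above are driven by the standard pre-commit tool, so they can be exercised locally before pushing. A short sketch:

```sh
# Install pre-commit and run every configured hook against the whole tree.
python -m pip install pre-commit
pre-commit install        # optional: also run the hooks on each commit
pre-commit run --all-files
```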
@ -8,7 +8,7 @@ sphinx:
configuration: Doc/conf.py

build:
os: ubuntu-22.04
os: ubuntu-24.04
tools:
python: "3"

@ -26,7 +26,9 @@ build:
exit 183;
fi

- asdf plugin add uv
- asdf install uv latest
- asdf global uv latest
- make -C Doc venv html
- mkdir _readthedocs
- mv Doc/build/html _readthedocs/html
12 .ruff.toml Normal file
@ -0,0 +1,12 @@
# Default settings for Ruff in CPython

# PYTHON_FOR_REGEN
target-version = "py310"

# PEP 8
line-length = 79

# Enable automatic fixes by default.
# To override this, use ``fix = false`` in a subdirectory's config file
# or ``--no-fix`` on the command line.
fix = true
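With this file at the repository root, plain `ruff check` invocations inherit these defaults. A brief sketch of how the `fix = true` default interacts with the command line (the directory is chosen only for illustration; a nearer per-directory `.ruff.toml`, such as the ones referenced in the pre-commit configuration above, takes precedence where present):

```sh
# Lint with the defaults above: py310 target, 79-column lines, autofix enabled.
ruff check Lib/test/
# Disable the automatic fixes for a read-only check, as the comment describes.
ruff check --no-fix Lib/test/
```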
1 .well-known/funding-manifest-urls Normal file
@ -0,0 +1 @@
https://www.python.org/funding.json
@ -1,19 +1,22 @@
|
|||
# Python for Android
|
||||
|
||||
These instructions are only needed if you're planning to compile Python for
|
||||
Android yourself. Most users should *not* need to do this. If you're looking to
|
||||
use Python on Android, one of the following tools will provide a much more
|
||||
approachable user experience:
|
||||
If you obtained this README as part of a release package, then the only
|
||||
applicable sections are "Prerequisites", "Testing", and "Using in your own app".
|
||||
|
||||
* [Briefcase](https://briefcase.readthedocs.io), from the BeeWare project
|
||||
* [Buildozer](https://buildozer.readthedocs.io), from the Kivy project
|
||||
* [Chaquopy](https://chaquo.com/chaquopy/)
|
||||
If you obtained this README as part of the CPython source tree, then you can
|
||||
also follow the other sections to compile Python for Android yourself.
|
||||
|
||||
However, most app developers should not need to do any of these things manually.
|
||||
Instead, use one of the tools listed
|
||||
[here](https://docs.python.org/3/using/android.html), which will provide a much
|
||||
easier experience.
|
||||
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Export the `ANDROID_HOME` environment variable to point at your Android SDK. If
|
||||
you don't already have the SDK, here's how to install it:
|
||||
If you already have an Android SDK installed, export the `ANDROID_HOME`
|
||||
environment variable to point at its location. Otherwise, here's how to install
|
||||
it:
|
||||
|
||||
* Download the "Command line tools" from <https://developer.android.com/studio>.
|
||||
* Create a directory `android-sdk/cmdline-tools`, and unzip the command line
|
||||
|
|
@ -22,12 +25,23 @@ ## Prerequisites
|
|||
`android-sdk/cmdline-tools/latest`.
|
||||
* `export ANDROID_HOME=/path/to/android-sdk`
|
||||
|
||||
The `android.py` script will automatically use the SDK's `sdkmanager` to install
|
||||
any packages it needs.
|
||||
|
||||
The script also requires the following commands to be on the `PATH`:
|
||||
|
||||
* `curl`
|
||||
* `java` (or set the `JAVA_HOME` environment variable)
|
||||
|
||||
|
||||
## Building
|
||||
|
||||
Building for Android requires doing a cross-build where you have a "build"
|
||||
Python to help produce an Android build of CPython. This procedure has been
|
||||
tested on Linux and macOS.
|
||||
Python can be built for Android on any POSIX platform supported by the Android
|
||||
development tools, which currently means Linux or macOS.
|
||||
|
||||
First we'll make a "build" Python (for your development machine), then use it to
|
||||
help produce a "host" Python for Android. So make sure you have all the usual
|
||||
tools and libraries needed to build Python for your development machine.
|
||||
|
||||
The easiest way to do a build is to use the `android.py` script. You can either
|
||||
have it perform the entire build process from start to finish in one step, or
|
||||
|
|
@ -43,16 +57,17 @@ ## Building
|
|||
./android.py make-host HOST
|
||||
```
|
||||
|
||||
To see the possible values of HOST, run `./android.py configure-host --help`.
|
||||
`HOST` identifies which architecture to build. To see the possible values, run
|
||||
`./android.py configure-host --help`.
|
||||
|
||||
Or to do it all in a single command, run:
|
||||
To do all steps in a single command, run:
|
||||
|
||||
```sh
|
||||
./android.py build HOST
|
||||
```
|
||||
|
||||
In the end you should have a build Python in `cross-build/build`, and an Android
|
||||
build in `cross-build/HOST`.
|
||||
In the end you should have a build Python in `cross-build/build`, and a host
|
||||
Python in `cross-build/HOST`.
|
||||
|
||||
You can use `--` as a separator for any of the `configure`-related commands –
|
||||
including `build` itself – to pass arguments to the underlying `configure`
|
||||
|
|
@ -62,3 +77,92 @@ ## Building
|
|||
```sh
|
||||
./android.py build HOST -- -C --with-pydebug
|
||||
```
|
||||
|
||||
|
||||
## Packaging
|
||||
|
||||
After building an architecture as described in the section above, you can
|
||||
package it for release with this command:
|
||||
|
||||
```sh
|
||||
./android.py package HOST
|
||||
```
|
||||
|
||||
`HOST` is defined in the section above.
|
||||
|
||||
This will generate a tarball in `cross-build/HOST/dist`, whose structure is
|
||||
similar to the `Android` directory of the CPython source tree.
|
||||
|
||||
|
||||
## Testing
|
||||
|
||||
The Python test suite can be run on Linux, macOS, or Windows.
|
||||
|
||||
On Linux, the emulator needs access to the KVM virtualization interface. This may
|
||||
require adding your user to a group, or changing your udev rules. On GitHub
|
||||
Actions, the test script will do this automatically using the commands shown
|
||||
[here](https://github.blog/changelog/2024-04-02-github-actions-hardware-accelerated-android-virtualization-now-available/).
|
||||

The test suite can usually be run on a device with 2 GB of RAM, but this is
borderline, so you may need to increase it to 4 GB. As of Android
Studio Koala, 2 GB is the default for all emulators, although the user interface
may indicate otherwise. Locate the emulator's directory under `~/.android/avd`,
and find `hw.ramSize` in both `config.ini` and `hardware-qemu.ini`. Either set these
manually to the same value, or use the Android Studio Device Manager, which will
update both files.
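
For example, assuming an AVD named `Small_Phone_API_34` (the name and the value
format in your files may differ, so check the existing entries first):

```sh
avd=~/.android/avd/Small_Phone_API_34.avd
grep -H 'hw\.ramSize' "$avd"/config.ini "$avd"/hardware-qemu.ini
# GNU sed shown; on macOS use `sed -i ''`.
sed -i 's/^hw\.ramSize=.*/hw.ramSize=4096/' "$avd"/config.ini "$avd"/hardware-qemu.ini
```
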
You can run the test suite either:

* Within the CPython repository, after doing a build as described above. On
Windows, you won't be able to do the build on the same machine, so you'll have
to copy the `cross-build/HOST/prefix` directory from somewhere else.

* Or by taking a release package built using the `package` command, extracting
it wherever you want, and using its own copy of `android.py`.

The test script supports the following modes:

* In `--connected` mode, it runs on a device or emulator you have already
connected to the build machine. List the available devices with
`$ANDROID_HOME/platform-tools/adb devices -l`, then pass a device ID to the
script like this:

```sh
./android.py test --connected emulator-5554
```

* In `--managed` mode, it uses a temporary headless emulator defined in the
`managedDevices` section of `testbed/app/build.gradle.kts`. This mode is slower,
but more reproducible.

We currently define two devices: `minVersion` and `maxVersion`, corresponding
to our minimum and maximum supported Android versions. For example:

```sh
./android.py test --managed maxVersion
```

By default, the only messages the script will show are Python's own stdout and
stderr. Add the `-v` option to also show Gradle output, and non-Python logcat
messages.

Any other arguments on the `android.py test` command line will be passed through
to `python -m test` – use `--` to separate them from `android.py`'s own options.
See the [Python Developer's
Guide](https://devguide.python.org/testing/run-write-tests/) for common options
– most of them will work on Android, except for those that involve subprocesses,
such as `-j`.

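For example, to run just a couple of test modules verbosely on the `maxVersion`
emulator:

```sh
./android.py test --managed maxVersion -- -v test_sys test_json
```
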
Every time you run `android.py test`, changes in pure-Python files in the
repository's `Lib` directory will be picked up immediately. Changes in C files,
and architecture-specific files such as sysconfigdata, will not take effect
until you re-run `android.py make-host` or `build`.
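
A typical edit-and-test loop might therefore look like this (a sketch, using the
aarch64 host and a connected emulator as examples):

```sh
# After editing a pure-Python module in Lib/:
./android.py test --connected emulator-5554 -- -v test_sys

# After editing a C file or anything architecture-specific:
./android.py make-host aarch64-linux-android
./android.py test --connected emulator-5554 -- -v test_sys
```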

The testbed app can also be used to test third-party packages. For more details,
run `android.py test --help`, paying attention to the options `--site-packages`,
`--cwd`, `-c` and `-m`.
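
As a rough sketch (the package name and paths below are placeholders, and only
pure-Python packages can be copied in this way without cross-compiling):

```sh
pip install --target /tmp/android-site-packages some_pure_python_package
./android.py test --managed maxVersion \
    --site-packages /tmp/android-site-packages -- -m some_pure_python_package.tests
```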


## Using in your own app

See https://docs.python.org/3/using/android.html.

|
|||
|
|
@ -1,10 +1,10 @@
|
|||
# This script must be sourced with the following variables already set:
|
||||
: ${ANDROID_HOME:?} # Path to Android SDK
|
||||
: ${HOST:?} # GNU target triplet
|
||||
: "${ANDROID_HOME:?}" # Path to Android SDK
|
||||
: "${HOST:?}" # GNU target triplet
|
||||
|
||||
# You may also override the following:
|
||||
: ${api_level:=21} # Minimum Android API level the build will run on
|
||||
: ${PREFIX:-} # Path in which to find required libraries
|
||||
: "${ANDROID_API_LEVEL:=24}" # Minimum Android API level the build will run on
|
||||
: "${PREFIX:-}" # Path in which to find required libraries
|
||||
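# Usage sketch: android.py sources this file with HOST and PREFIX already set,
# but it can also be sourced manually to cross-compile other projects, e.g.
# (paths below are examples only):
#
#   export ANDROID_HOME=$HOME/Android/Sdk
#   export HOST=aarch64-linux-android
#   export PREFIX=$(pwd)/cross-build/$HOST/prefix   # optional
#   . Android/android-env.sh
#   ./configure --host=$HOST --prefix=$PREFIX && make -j "$CPU_COUNT"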
|
||||
|
||||
# Print all messages on stderr so they're visible when running within build-wheel.
|
||||
|
|
@ -24,26 +24,26 @@ fail() {
|
|||
# * https://android.googlesource.com/platform/ndk/+/ndk-rXX-release/docs/BuildSystemMaintainers.md
|
||||
# where XX is the NDK version. Do a diff against the version you're upgrading from, e.g.:
|
||||
# https://android.googlesource.com/platform/ndk/+/ndk-r25-release..ndk-r26-release/docs/BuildSystemMaintainers.md
|
||||
ndk_version=26.2.11394342
|
||||
ndk_version=27.3.13750724
|
||||
|
||||
ndk=$ANDROID_HOME/ndk/$ndk_version
|
||||
if ! [ -e $ndk ]; then
|
||||
log "Installing NDK: this may take several minutes"
|
||||
yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version"
|
||||
if ! [ -e "$ndk" ]; then
|
||||
log "Installing NDK - this may take several minutes"
|
||||
yes | "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;$ndk_version"
|
||||
fi
|
||||
|
||||
if [ $HOST = "arm-linux-androideabi" ]; then
|
||||
if [ "$HOST" = "arm-linux-androideabi" ]; then
|
||||
clang_triplet=armv7a-linux-androideabi
|
||||
else
|
||||
clang_triplet=$HOST
|
||||
clang_triplet="$HOST"
|
||||
fi
|
||||
|
||||
# These variables are based on BuildSystemMaintainers.md above, and
|
||||
# $ndk/build/cmake/android.toolchain.cmake.
|
||||
toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*)
|
||||
toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*)
|
||||
export AR="$toolchain/bin/llvm-ar"
|
||||
export AS="$toolchain/bin/llvm-as"
|
||||
export CC="$toolchain/bin/${clang_triplet}${api_level}-clang"
|
||||
export CC="$toolchain/bin/${clang_triplet}${ANDROID_API_LEVEL}-clang"
|
||||
export CXX="${CC}++"
|
||||
export LD="$toolchain/bin/ld"
|
||||
export NM="$toolchain/bin/llvm-nm"
|
||||
|
|
@ -58,20 +58,26 @@ for path in "$AR" "$AS" "$CC" "$CXX" "$LD" "$NM" "$RANLIB" "$READELF" "$STRIP";
|
|||
fi
|
||||
done
|
||||
|
||||
export CFLAGS=""
|
||||
export LDFLAGS="-Wl,--build-id=sha1 -Wl,--no-rosegment"
|
||||
export CFLAGS="-D__BIONIC_NO_PAGE_SIZE_MACRO"
|
||||
export LDFLAGS="-Wl,--build-id=sha1 -Wl,--no-rosegment -Wl,-z,max-page-size=16384"
|
||||
|
||||
# Unlike Linux, Android does not implicitly use a dlopened library to resolve
|
||||
# relocations in subsequently-loaded libraries, even if RTLD_GLOBAL is used
|
||||
# (https://github.com/android/ndk/issues/1244). So any library that fails to
|
||||
# build with this flag, would also fail to load at runtime.
|
||||
LDFLAGS="$LDFLAGS -Wl,--no-undefined"
|
||||
|
||||
# Many packages get away with omitting -lm on Linux, but Android is stricter.
|
||||
LDFLAGS="$LDFLAGS -lm"
|
||||
|
||||
# -mstackrealign is included where necessary in the clang launcher scripts which are
|
||||
# pointed to by $CC, so we don't need to include it here.
|
||||
if [ $HOST = "arm-linux-androideabi" ]; then
|
||||
if [ "$HOST" = "arm-linux-androideabi" ]; then
|
||||
CFLAGS="$CFLAGS -march=armv7-a -mthumb"
|
||||
fi
|
||||
|
||||
if [ -n "${PREFIX:-}" ]; then
|
||||
abs_prefix=$(realpath $PREFIX)
|
||||
abs_prefix="$(realpath "$PREFIX")"
|
||||
CFLAGS="$CFLAGS -I$abs_prefix/include"
|
||||
LDFLAGS="$LDFLAGS -L$abs_prefix/lib"
|
||||
|
||||
|
|
@ -79,9 +85,15 @@ if [ -n "${PREFIX:-}" ]; then
|
|||
export PKG_CONFIG_LIBDIR="$abs_prefix/lib/pkgconfig"
|
||||
fi
|
||||
|
||||
# When compiling C++, some build systems will combine CFLAGS and CXXFLAGS, and some will
|
||||
# use CXXFLAGS alone.
|
||||
export CXXFLAGS="$CFLAGS"
|
||||
|
||||
# Use the same variable name as conda-build
|
||||
if [ $(uname) = "Darwin" ]; then
|
||||
export CPU_COUNT=$(sysctl -n hw.ncpu)
|
||||
if [ "$(uname)" = "Darwin" ]; then
|
||||
CPU_COUNT="$(sysctl -n hw.ncpu)"
|
||||
export CPU_COUNT
|
||||
else
|
||||
export CPU_COUNT=$(nproc)
|
||||
CPU_COUNT="$(nproc)"
|
||||
export CPU_COUNT
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -1,32 +1,87 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import asyncio
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import sysconfig
|
||||
from os.path import relpath
|
||||
from asyncio import wait_for
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime, timezone
|
||||
from glob import glob
|
||||
from os.path import abspath, basename, relpath
|
||||
from pathlib import Path
|
||||
from subprocess import CalledProcessError
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
|
||||
SCRIPT_NAME = Path(__file__).name
|
||||
CHECKOUT = Path(__file__).resolve().parent.parent
|
||||
CROSS_BUILD_DIR = CHECKOUT / "cross-build"
|
||||
ANDROID_DIR = Path(__file__).resolve().parent
|
||||
PYTHON_DIR = ANDROID_DIR.parent
|
||||
in_source_tree = (
|
||||
ANDROID_DIR.name == "Android" and (PYTHON_DIR / "pyconfig.h.in").exists()
|
||||
)
|
||||
|
||||
TESTBED_DIR = ANDROID_DIR / "testbed"
|
||||
CROSS_BUILD_DIR = PYTHON_DIR / "cross-build"
|
||||
|
||||
HOSTS = ["aarch64-linux-android", "x86_64-linux-android"]
|
||||
APP_ID = "org.python.testbed"
|
||||
DECODE_ARGS = ("UTF-8", "backslashreplace")
|
||||
|
||||
|
||||
def delete_if_exists(path):
|
||||
if path.exists():
|
||||
try:
|
||||
android_home = Path(os.environ['ANDROID_HOME'])
|
||||
except KeyError:
|
||||
sys.exit("The ANDROID_HOME environment variable is required.")
|
||||
|
||||
adb = Path(
|
||||
f"{android_home}/platform-tools/adb"
|
||||
+ (".exe" if os.name == "nt" else "")
|
||||
)
|
||||
|
||||
gradlew = Path(
|
||||
f"{TESTBED_DIR}/gradlew"
|
||||
+ (".bat" if os.name == "nt" else "")
|
||||
)
|
||||
|
||||
# Whether we've seen any output from Python yet.
|
||||
python_started = False
|
||||
|
||||
# Buffer for verbose output which will be displayed only if a test fails and
|
||||
# there has been no output from Python.
|
||||
hidden_output = []
|
||||
|
||||
|
||||
def log_verbose(context, line, stream=sys.stdout):
|
||||
if context.verbose:
|
||||
stream.write(line)
|
||||
else:
|
||||
hidden_output.append((stream, line))
|
||||
|
||||
|
||||
def delete_glob(pattern):
|
||||
# Path.glob doesn't accept non-relative patterns.
|
||||
for path in glob(str(pattern)):
|
||||
path = Path(path)
|
||||
print(f"Deleting {path} ...")
|
||||
shutil.rmtree(path)
|
||||
if path.is_dir() and not path.is_symlink():
|
||||
shutil.rmtree(path)
|
||||
else:
|
||||
path.unlink()
|
||||
|
||||
|
||||
def subdir(name, *, clean=None):
|
||||
path = CROSS_BUILD_DIR / name
|
||||
if clean:
|
||||
delete_if_exists(path)
|
||||
def subdir(*parts, create=False):
|
||||
path = CROSS_BUILD_DIR.joinpath(*parts)
|
||||
if not path.exists():
|
||||
if clean is None:
|
||||
if not create:
|
||||
sys.exit(
|
||||
f"{path} does not exist. Create it by running the appropriate "
|
||||
f"`configure` subcommand of {SCRIPT_NAME}.")
|
||||
|
|
@ -35,39 +90,70 @@ def subdir(name, *, clean=None):
|
|||
return path
|
||||
|
||||
|
||||
def run(command, *, host=None, **kwargs):
|
||||
env = os.environ.copy()
|
||||
def run(command, *, host=None, env=None, log=True, **kwargs):
|
||||
kwargs.setdefault("check", True)
|
||||
if env is None:
|
||||
env = os.environ.copy()
|
||||
|
||||
if host:
|
||||
env_script = CHECKOUT / "Android/android-env.sh"
|
||||
env_output = subprocess.run(
|
||||
f"set -eu; "
|
||||
f"HOST={host}; "
|
||||
f"PREFIX={subdir(host)}/prefix; "
|
||||
f". {env_script}; "
|
||||
f"export",
|
||||
check=True, shell=True, text=True, stdout=subprocess.PIPE
|
||||
).stdout
|
||||
host_env = android_env(host)
|
||||
print_env(host_env)
|
||||
env.update(host_env)
|
||||
|
||||
for line in env_output.splitlines():
|
||||
# We don't require every line to match, as there may be some other
|
||||
# output from installing the NDK.
|
||||
if match := re.search(
|
||||
"^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
|
||||
):
|
||||
key, value = match[2], match[3]
|
||||
if env.get(key) != value:
|
||||
print(line)
|
||||
env[key] = value
|
||||
if log:
|
||||
print(">", join_command(command))
|
||||
return subprocess.run(command, env=env, **kwargs)
|
||||
|
||||
if env == os.environ:
|
||||
raise ValueError(f"Found no variables in {env_script.name} output:\n"
|
||||
+ env_output)
|
||||
|
||||
print(">", " ".join(map(str, command)))
|
||||
try:
|
||||
subprocess.run(command, check=True, env=env, **kwargs)
|
||||
except subprocess.CalledProcessError as e:
|
||||
sys.exit(e)
|
||||
# Format a command so it can be copied into a shell. Like shlex.join, but also
|
||||
# accepts arguments which are Paths, or a single string/Path outside of a list.
|
||||
def join_command(args):
|
||||
if isinstance(args, (str, Path)):
|
||||
return str(args)
|
||||
else:
|
||||
return shlex.join(map(str, args))
|
||||
|
||||
|
||||
# Format the environment so it can be pasted into a shell.
|
||||
def print_env(env):
|
||||
for key, value in sorted(env.items()):
|
||||
print(f"export {key}={shlex.quote(value)}")
|
||||
|
||||
|
||||
def android_env(host):
|
||||
if host:
|
||||
prefix = subdir(host) / "prefix"
|
||||
else:
|
||||
prefix = ANDROID_DIR / "prefix"
|
||||
sysconfig_files = prefix.glob("lib/python*/_sysconfigdata__android_*.py")
|
||||
sysconfig_filename = next(sysconfig_files).name
|
||||
host = re.fullmatch(r"_sysconfigdata__android_(.+).py", sysconfig_filename)[1]
|
||||
|
||||
env_script = ANDROID_DIR / "android-env.sh"
|
||||
env_output = subprocess.run(
|
||||
f"set -eu; "
|
||||
f"HOST={host}; "
|
||||
f"PREFIX={prefix}; "
|
||||
f". {env_script}; "
|
||||
f"export",
|
||||
check=True, shell=True, capture_output=True, encoding='utf-8',
|
||||
).stdout
|
||||
|
||||
env = {}
|
||||
for line in env_output.splitlines():
|
||||
# We don't require every line to match, as there may be some other
|
||||
# output from installing the NDK.
|
||||
if match := re.search(
|
||||
"^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
|
||||
):
|
||||
key, value = match[2], match[3]
|
||||
if os.environ.get(key) != value:
|
||||
env[key] = value
|
||||
|
||||
if not env:
|
||||
raise ValueError(f"Found no variables in {env_script.name} output:\n"
|
||||
+ env_output)
|
||||
return env
|
||||
|
||||
|
||||
def build_python_path():
|
||||
|
|
@ -84,9 +170,11 @@ def build_python_path():
|
|||
|
||||
|
||||
def configure_build_python(context):
|
||||
os.chdir(subdir("build", clean=context.clean))
|
||||
if context.clean:
|
||||
clean("build")
|
||||
os.chdir(subdir("build", create=True))
|
||||
|
||||
command = [relpath(CHECKOUT / "configure")]
|
||||
command = [relpath(PYTHON_DIR / "configure")]
|
||||
if context.args:
|
||||
command.extend(context.args)
|
||||
run(command)
|
||||
|
|
@ -97,32 +185,43 @@ def make_build_python(context):
|
|||
run(["make", "-j", str(os.cpu_count())])
|
||||
|
||||
|
||||
def unpack_deps(host):
|
||||
# To create new builds of these dependencies, usually all that's necessary is to
|
||||
# push a tag to the cpython-android-source-deps repository, and GitHub Actions
|
||||
# will do the rest.
|
||||
#
|
||||
# If you're a member of the Python core team, and you'd like to be able to push
|
||||
# these tags yourself, please contact Malcolm Smith or Russell Keith-Magee.
|
||||
def unpack_deps(host, prefix_dir):
|
||||
os.chdir(prefix_dir)
|
||||
deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
|
||||
for name_ver in ["bzip2-1.0.8-1", "libffi-3.4.4-2", "openssl-3.0.13-1",
|
||||
"sqlite-3.45.1-0", "xz-5.4.6-0"]:
|
||||
for name_ver in ["bzip2-1.0.8-3", "libffi-3.4.4-3", "openssl-3.0.18-0",
|
||||
"sqlite-3.50.4-0", "xz-5.4.6-1", "zstd-1.5.7-1"]:
|
||||
filename = f"{name_ver}-{host}.tar.gz"
|
||||
run(["wget", f"{deps_url}/{name_ver}/{filename}"])
|
||||
run(["tar", "-xf", filename])
|
||||
download(f"{deps_url}/{name_ver}/{filename}")
|
||||
shutil.unpack_archive(filename)
|
||||
os.remove(filename)
|
||||
|
||||
|
||||
def configure_host_python(context):
|
||||
host_dir = subdir(context.host, clean=context.clean)
|
||||
def download(url, target_dir="."):
|
||||
out_path = f"{target_dir}/{basename(url)}"
|
||||
run(["curl", "-Lf", "--retry", "5", "--retry-all-errors", "-o", out_path, url])
|
||||
return out_path
|
||||
|
||||
|
||||
def configure_host_python(context):
|
||||
if context.clean:
|
||||
clean(context.host)
|
||||
|
||||
host_dir = subdir(context.host, create=True)
|
||||
prefix_dir = host_dir / "prefix"
|
||||
if not prefix_dir.exists():
|
||||
prefix_dir.mkdir()
|
||||
os.chdir(prefix_dir)
|
||||
unpack_deps(context.host)
|
||||
|
||||
build_dir = host_dir / "build"
|
||||
build_dir.mkdir(exist_ok=True)
|
||||
os.chdir(build_dir)
|
||||
unpack_deps(context.host, prefix_dir)
|
||||
|
||||
os.chdir(host_dir)
|
||||
command = [
|
||||
# Basic cross-compiling configuration
|
||||
relpath(CHECKOUT / "configure"),
|
||||
relpath(PYTHON_DIR / "configure"),
|
||||
f"--host={context.host}",
|
||||
f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}",
|
||||
f"--with-build-python={build_python_path()}",
|
||||
|
|
@ -143,10 +242,26 @@ def configure_host_python(context):
|
|||
|
||||
|
||||
def make_host_python(context):
|
||||
# The CFLAGS and LDFLAGS set in android-env include the prefix dir, so
|
||||
# delete any previous Python installation to prevent it being used during
|
||||
# the build.
|
||||
host_dir = subdir(context.host)
|
||||
os.chdir(host_dir / "build")
|
||||
run(["make", "-j", str(os.cpu_count())], host=context.host)
|
||||
run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host)
|
||||
prefix_dir = host_dir / "prefix"
|
||||
for pattern in ("include/python*", "lib/libpython*", "lib/python*"):
|
||||
delete_glob(f"{prefix_dir}/{pattern}")
|
||||
|
||||
# The Android environment variables were already captured in the Makefile by
|
||||
# `configure`, and passing them again when running `make` may cause some
|
||||
# flags to be duplicated. So we don't use the `host` argument here.
|
||||
os.chdir(host_dir)
|
||||
run(["make", "-j", str(os.cpu_count())])
|
||||
|
||||
# The `make install` output is very verbose and rarely useful, so
|
||||
# suppress it by default.
|
||||
run(
|
||||
["make", "install", f"prefix={prefix_dir}"],
|
||||
capture_output=not context.verbose,
|
||||
)
|
||||
|
||||
|
||||
def build_all(context):
|
||||
|
|
@ -156,46 +271,645 @@ def build_all(context):
|
|||
step(context)
|
||||
|
||||
|
||||
def clean(host):
|
||||
delete_glob(CROSS_BUILD_DIR / host)
|
||||
|
||||
|
||||
def clean_all(context):
|
||||
delete_if_exists(CROSS_BUILD_DIR)
|
||||
for host in HOSTS + ["build"]:
|
||||
clean(host)
|
||||
|
||||
|
||||
def main():
|
||||
def setup_ci():
|
||||
# https://github.blog/changelog/2024-04-02-github-actions-hardware-accelerated-android-virtualization-now-available/
|
||||
if "GITHUB_ACTIONS" in os.environ and platform.system() == "Linux":
|
||||
run(
|
||||
["sudo", "tee", "/etc/udev/rules.d/99-kvm4all.rules"],
|
||||
input='KERNEL=="kvm", GROUP="kvm", MODE="0666", OPTIONS+="static_node=kvm"\n',
|
||||
text=True,
|
||||
)
|
||||
run(["sudo", "udevadm", "control", "--reload-rules"])
|
||||
run(["sudo", "udevadm", "trigger", "--name-match=kvm"])
|
||||
|
||||
|
||||
def setup_sdk():
|
||||
sdkmanager = android_home / (
|
||||
"cmdline-tools/latest/bin/sdkmanager"
|
||||
+ (".bat" if os.name == "nt" else "")
|
||||
)
|
||||
|
||||
# Gradle will fail if it needs to install an SDK package whose license
|
||||
# hasn't been accepted, so pre-accept all licenses.
|
||||
if not all((android_home / "licenses" / path).exists() for path in [
|
||||
"android-sdk-arm-dbt-license", "android-sdk-license"
|
||||
]):
|
||||
run(
|
||||
[sdkmanager, "--licenses"],
|
||||
text=True,
|
||||
capture_output=True,
|
||||
input="y\n" * 100,
|
||||
)
|
||||
|
||||
# Gradle may install this automatically, but we can't rely on that because
|
||||
# we need to run adb within the logcat task.
|
||||
if not adb.exists():
|
||||
run([sdkmanager, "platform-tools"])
|
||||
|
||||
|
||||
# To avoid distributing compiled artifacts without corresponding source code,
|
||||
# the Gradle wrapper is not included in the CPython repository. Instead, we
|
||||
# extract it from the Gradle GitHub repository.
|
||||
def setup_testbed():
|
||||
paths = ["gradlew", "gradlew.bat", "gradle/wrapper/gradle-wrapper.jar"]
|
||||
if all((TESTBED_DIR / path).exists() for path in paths):
|
||||
return
|
||||
|
||||
# The wrapper version isn't important, as any version of the wrapper can
|
||||
# download any version of Gradle. The Gradle version actually used for the
|
||||
# build is specified in testbed/gradle/wrapper/gradle-wrapper.properties.
|
||||
version = "8.9.0"
|
||||
|
||||
for path in paths:
|
||||
out_path = TESTBED_DIR / path
|
||||
out_path.parent.mkdir(exist_ok=True)
|
||||
download(
|
||||
f"https://raw.githubusercontent.com/gradle/gradle/v{version}/{path}",
|
||||
out_path.parent,
|
||||
)
|
||||
os.chmod(out_path, 0o755)
|
||||
|
||||
|
||||
# run_testbed will build the app automatically, but it's useful to have this as
|
||||
# a separate command to allow running the app outside of this script.
|
||||
def build_testbed(context):
|
||||
setup_sdk()
|
||||
setup_testbed()
|
||||
run(
|
||||
[gradlew, "--console", "plain", "packageDebug", "packageDebugAndroidTest"],
|
||||
cwd=TESTBED_DIR,
|
||||
)
|
||||
|
||||
|
||||
# Work around a bug involving sys.exit and TaskGroups
|
||||
# (https://github.com/python/cpython/issues/101515).
|
||||
def exit(*args):
|
||||
raise MySystemExit(*args)
|
||||
|
||||
|
||||
class MySystemExit(Exception):
|
||||
pass
|
||||
|
||||
|
||||
# The `test` subcommand runs all subprocesses through this context manager so
|
||||
# that no matter what happens, they can always be cancelled from another task,
|
||||
# and they will always be cleaned up on exit.
|
||||
@asynccontextmanager
|
||||
async def async_process(*args, **kwargs):
|
||||
process = await asyncio.create_subprocess_exec(*args, **kwargs)
|
||||
try:
|
||||
yield process
|
||||
finally:
|
||||
if process.returncode is None:
|
||||
# Allow a reasonably long time for Gradle to clean itself up,
|
||||
# because we don't want stale emulators left behind.
|
||||
timeout = 10
|
||||
process.terminate()
|
||||
try:
|
||||
await wait_for(process.wait(), timeout)
|
||||
except TimeoutError:
|
||||
print(
|
||||
f"Command {args} did not terminate after {timeout} seconds "
|
||||
f" - sending SIGKILL"
|
||||
)
|
||||
process.kill()
|
||||
|
||||
# Even after killing the process we must still wait for it,
|
||||
# otherwise we'll get the warning "Exception ignored in __del__".
|
||||
await wait_for(process.wait(), timeout=1)
|
||||
|
||||
|
||||
async def async_check_output(*args, **kwargs):
|
||||
async with async_process(
|
||||
*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
|
||||
) as process:
|
||||
stdout, stderr = await process.communicate()
|
||||
if process.returncode == 0:
|
||||
return stdout.decode(*DECODE_ARGS)
|
||||
else:
|
||||
raise CalledProcessError(
|
||||
process.returncode, args,
|
||||
stdout.decode(*DECODE_ARGS), stderr.decode(*DECODE_ARGS)
|
||||
)
|
||||
|
||||
|
||||
# Return a list of the serial numbers of connected devices. Emulators will have
|
||||
# serials of the form "emulator-5678".
|
||||
async def list_devices():
|
||||
serials = []
|
||||
header_found = False
|
||||
|
||||
lines = (await async_check_output(adb, "devices")).splitlines()
|
||||
for line in lines:
|
||||
# Ignore blank lines, and all lines before the header.
|
||||
line = line.strip()
|
||||
if line == "List of devices attached":
|
||||
header_found = True
|
||||
elif header_found and line:
|
||||
try:
|
||||
serial, status = line.split()
|
||||
except ValueError:
|
||||
raise ValueError(f"failed to parse {line!r}")
|
||||
if status == "device":
|
||||
serials.append(serial)
|
||||
|
||||
if not header_found:
|
||||
raise ValueError(f"failed to parse {lines}")
|
||||
return serials
|
||||
|
||||
|
||||
async def find_device(context, initial_devices):
|
||||
if context.managed:
|
||||
print("Waiting for managed device - this may take several minutes")
|
||||
while True:
|
||||
new_devices = set(await list_devices()).difference(initial_devices)
|
||||
if len(new_devices) == 0:
|
||||
await asyncio.sleep(1)
|
||||
elif len(new_devices) == 1:
|
||||
serial = new_devices.pop()
|
||||
print(f"Serial: {serial}")
|
||||
return serial
|
||||
else:
|
||||
exit(f"Found more than one new device: {new_devices}")
|
||||
else:
|
||||
return context.connected
|
||||
|
||||
|
||||
# An older version of this script in #121595 filtered the logs by UID instead.
|
||||
# But logcat can't filter by UID until API level 31. If we ever switch back to
|
||||
# filtering by UID, we'll also have to filter by time so we only show messages
|
||||
# produced after the initial call to `stop_app`.
|
||||
#
|
||||
# We're more likely to miss the PID because it's shorter-lived, so there's a
|
||||
# workaround in PythonSuite.kt to stop it being *too* short-lived.
|
||||
async def find_pid(serial):
|
||||
print("Waiting for app to start - this may take several minutes")
|
||||
shown_error = False
|
||||
while True:
|
||||
try:
|
||||
# `pidof` requires API level 24 or higher. The level 23 emulator
|
||||
# includes it, but it doesn't work (it returns all processes).
|
||||
pid = (await async_check_output(
|
||||
adb, "-s", serial, "shell", "pidof", "-s", APP_ID
|
||||
)).strip()
|
||||
except CalledProcessError as e:
|
||||
# If the app isn't running yet, pidof gives no output. So if there
|
||||
# is output, there must have been some other error. However, this
|
||||
# sometimes happens transiently, especially when running a managed
|
||||
# emulator for the first time, so don't make it fatal.
|
||||
if (e.stdout or e.stderr) and not shown_error:
|
||||
print_called_process_error(e)
|
||||
print("This may be transient, so continuing to wait")
|
||||
shown_error = True
|
||||
else:
|
||||
# Some older devices (e.g. Nexus 4) return zero even when no process
|
||||
# was found, so check whether we actually got any output.
|
||||
if pid:
|
||||
print(f"PID: {pid}")
|
||||
return pid
|
||||
|
||||
# Loop fairly rapidly to avoid missing a short-lived process.
|
||||
await asyncio.sleep(0.2)
|
||||
|
||||
|
||||
async def logcat_task(context, initial_devices):
|
||||
# Gradle may need to do some large downloads of libraries and emulator
|
||||
# images. This will happen during find_device in --managed mode, or find_pid
|
||||
# in --connected mode.
|
||||
startup_timeout = 600
|
||||
serial = await wait_for(find_device(context, initial_devices), startup_timeout)
|
||||
pid = await wait_for(find_pid(serial), startup_timeout)
|
||||
|
||||
# `--pid` requires API level 24 or higher.
|
||||
args = [adb, "-s", serial, "logcat", "--pid", pid, "--format", "tag"]
|
||||
logcat_started = False
|
||||
async with async_process(
|
||||
*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
) as process:
|
||||
while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
|
||||
if match := re.fullmatch(r"([A-Z])/(.*)", line, re.DOTALL):
|
||||
logcat_started = True
|
||||
level, message = match.groups()
|
||||
else:
|
||||
# If the regex doesn't match, this is either a logcat startup
|
||||
# error, or the second or subsequent line of a multi-line
|
||||
# message. Python won't produce multi-line messages, but other
|
||||
# components might.
|
||||
level, message = None, line
|
||||
|
||||
# Exclude high-volume messages which are rarely useful.
|
||||
if context.verbose < 2 and "from python test_syslog" in message:
|
||||
continue
|
||||
|
||||
# Put high-level messages on stderr so they're highlighted in the
|
||||
# buildbot logs. This will include Python's own stderr.
|
||||
stream = (
|
||||
sys.stderr
|
||||
if level in ["W", "E", "F"] # WARNING, ERROR, FATAL (aka ASSERT)
|
||||
else sys.stdout
|
||||
)
|
||||
|
||||
# To simplify automated processing of the output, e.g. a buildbot
|
||||
# posting a failure notice on a GitHub PR, we strip the level and
|
||||
# tag indicators from Python's stdout and stderr.
|
||||
for prefix in ["python.stdout: ", "python.stderr: "]:
|
||||
if message.startswith(prefix):
|
||||
global python_started
|
||||
python_started = True
|
||||
stream.write(message.removeprefix(prefix))
|
||||
break
|
||||
else:
|
||||
# Non-Python messages add a lot of noise, but they may
|
||||
# sometimes help explain a failure.
|
||||
log_verbose(context, line, stream)
|
||||
|
||||
# If the device disconnects while logcat is running, which always
|
||||
# happens in --managed mode, some versions of adb return non-zero.
|
||||
# Distinguish this from a logcat startup error by checking whether we've
|
||||
# received any logcat messages yet.
|
||||
status = await wait_for(process.wait(), timeout=1)
|
||||
if status != 0 and not logcat_started:
|
||||
raise CalledProcessError(status, args)
|
||||
|
||||
|
||||
def stop_app(serial):
|
||||
run([adb, "-s", serial, "shell", "am", "force-stop", APP_ID], log=False)
|
||||
|
||||
|
||||
async def gradle_task(context):
|
||||
env = os.environ.copy()
|
||||
if context.managed:
|
||||
task_prefix = context.managed
|
||||
else:
|
||||
task_prefix = "connected"
|
||||
env["ANDROID_SERIAL"] = context.connected
|
||||
|
||||
if context.ci_mode:
|
||||
context.args[0:0] = [
|
||||
# See _add_ci_python_opts in libregrtest/main.py.
|
||||
"-W", "error", "-bb", "-E",
|
||||
|
||||
# Randomization is disabled because order-dependent failures are
|
||||
# much less likely to pass on a rerun in single-process mode.
|
||||
"-m", "test",
|
||||
f"--{context.ci_mode}-ci", "--single-process", "--no-randomize"
|
||||
]
|
||||
|
||||
if not any(arg in context.args for arg in ["-c", "-m"]):
|
||||
context.args[0:0] = ["-m", "test"]
|
||||
|
||||
args = [
|
||||
gradlew, "--console", "plain", f"{task_prefix}DebugAndroidTest",
|
||||
] + [
|
||||
f"-P{name}={value}"
|
||||
for name, value in [
|
||||
("python.sitePackages", context.site_packages),
|
||||
("python.cwd", context.cwd),
|
||||
(
|
||||
"android.testInstrumentationRunnerArguments.pythonArgs",
|
||||
json.dumps(context.args),
|
||||
),
|
||||
]
|
||||
if value
|
||||
]
|
||||
if context.verbose >= 2:
|
||||
args.append("--info")
|
||||
log_verbose(context, f"> {join_command(args)}\n")
|
||||
|
||||
try:
|
||||
async with async_process(
|
||||
*args, cwd=TESTBED_DIR, env=env,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
) as process:
|
||||
while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
|
||||
# Gradle may take several minutes to install SDK packages, so
|
||||
# it's worth showing those messages even in non-verbose mode.
|
||||
if line.startswith('Preparing "Install'):
|
||||
sys.stdout.write(line)
|
||||
else:
|
||||
log_verbose(context, line)
|
||||
|
||||
status = await wait_for(process.wait(), timeout=1)
|
||||
if status == 0:
|
||||
exit(0)
|
||||
else:
|
||||
raise CalledProcessError(status, args)
|
||||
finally:
|
||||
# Gradle does not stop the tests when interrupted.
|
||||
if context.connected:
|
||||
stop_app(context.connected)
|
||||
|
||||
|
||||
async def run_testbed(context):
|
||||
setup_ci()
|
||||
setup_sdk()
|
||||
setup_testbed()
|
||||
|
||||
if context.managed:
|
||||
# In this mode, Gradle will create a device with an unpredictable name.
|
||||
# So we save a list of the running devices before starting Gradle, and
|
||||
# find_device then waits for a new device to appear.
|
||||
initial_devices = await list_devices()
|
||||
else:
|
||||
# In case the previous shutdown was unclean, make sure the app isn't
|
||||
# running, otherwise we might show logs from a previous run. This is
|
||||
# unnecessary in --managed mode, because Gradle creates a new emulator
|
||||
# every time.
|
||||
stop_app(context.connected)
|
||||
initial_devices = None
|
||||
|
||||
try:
|
||||
async with asyncio.TaskGroup() as tg:
|
||||
tg.create_task(logcat_task(context, initial_devices))
|
||||
tg.create_task(gradle_task(context))
|
||||
except* MySystemExit as e:
|
||||
raise SystemExit(*e.exceptions[0].args) from None
|
||||
except* CalledProcessError as e:
|
||||
# If Python produced no output, then the user probably wants to see the
|
||||
# verbose output to explain why the test failed.
|
||||
if not python_started:
|
||||
for stream, line in hidden_output:
|
||||
stream.write(line)
|
||||
|
||||
# Extract it from the ExceptionGroup so it can be handled by `main`.
|
||||
raise e.exceptions[0]
|
||||
|
||||
|
||||
def package_version(prefix_dir):
|
||||
patchlevel_glob = f"{prefix_dir}/include/python*/patchlevel.h"
|
||||
patchlevel_paths = glob(patchlevel_glob)
|
||||
if len(patchlevel_paths) != 1:
|
||||
sys.exit(f"{patchlevel_glob} matched {len(patchlevel_paths)} paths.")
|
||||
|
||||
for line in open(patchlevel_paths[0]):
|
||||
if match := re.fullmatch(r'\s*#define\s+PY_VERSION\s+"(.+)"\s*', line):
|
||||
version = match[1]
|
||||
break
|
||||
else:
|
||||
sys.exit(f"Failed to find Python version in {patchlevel_paths[0]}.")
|
||||
|
||||
# If not building against a tagged commit, add a timestamp to the version.
|
||||
# Follow the PyPA version number rules, as this will make it easier to
|
||||
# process with other tools.
|
||||
if version.endswith("+"):
|
||||
version += datetime.now(timezone.utc).strftime("%Y%m%d.%H%M%S")
|
||||
|
||||
return version
|
||||
|
||||
|
||||
def package(context):
|
||||
prefix_dir = subdir(context.host, "prefix")
|
||||
version = package_version(prefix_dir)
|
||||
|
||||
with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir:
|
||||
temp_dir = Path(temp_dir)
|
||||
|
||||
# Include all tracked files from the Android directory.
|
||||
for line in run(
|
||||
["git", "ls-files"],
|
||||
cwd=ANDROID_DIR, capture_output=True, text=True, log=False,
|
||||
).stdout.splitlines():
|
||||
src = ANDROID_DIR / line
|
||||
dst = temp_dir / line
|
||||
dst.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(src, dst, follow_symlinks=False)
|
||||
|
||||
# Include anything from the prefix directory which could be useful
|
||||
# either for embedding Python in an app, or building third-party
|
||||
# packages against it.
|
||||
for rel_dir, patterns in [
|
||||
("include", ["openssl*", "python*", "sqlite*"]),
|
||||
("lib", ["engines-3", "libcrypto*.so", "libpython*", "libsqlite*",
|
||||
"libssl*.so", "ossl-modules", "python*"]),
|
||||
("lib/pkgconfig", ["*crypto*", "*ssl*", "*python*", "*sqlite*"]),
|
||||
]:
|
||||
for pattern in patterns:
|
||||
for src in glob(f"{prefix_dir}/{rel_dir}/{pattern}"):
|
||||
dst = temp_dir / relpath(src, prefix_dir.parent)
|
||||
dst.parent.mkdir(parents=True, exist_ok=True)
|
||||
if Path(src).is_dir():
|
||||
shutil.copytree(
|
||||
src, dst, symlinks=True,
|
||||
ignore=lambda *args: ["__pycache__"]
|
||||
)
|
||||
else:
|
||||
shutil.copy2(src, dst, follow_symlinks=False)
|
||||
|
||||
# Strip debug information.
|
||||
if not context.debug:
|
||||
so_files = glob(f"{temp_dir}/**/*.so", recursive=True)
|
||||
run([android_env(context.host)["STRIP"], *so_files], log=False)
|
||||
|
||||
dist_dir = subdir(context.host, "dist", create=True)
|
||||
package_path = shutil.make_archive(
|
||||
f"{dist_dir}/python-{version}-{context.host}", "gztar", temp_dir
|
||||
)
|
||||
print(f"Wrote {package_path}")
|
||||
return package_path
|
||||
|
||||
|
||||
def ci(context):
|
||||
for step in [
|
||||
configure_build_python,
|
||||
make_build_python,
|
||||
configure_host_python,
|
||||
make_host_python,
|
||||
package,
|
||||
]:
|
||||
caption = (
|
||||
step.__name__.replace("_", " ")
|
||||
.capitalize()
|
||||
.replace("python", "Python")
|
||||
)
|
||||
print(f"::group::{caption}")
|
||||
result = step(context)
|
||||
if step is package:
|
||||
package_path = result
|
||||
print("::endgroup::")
|
||||
|
||||
if (
|
||||
"GITHUB_ACTIONS" in os.environ
|
||||
and (platform.system(), platform.machine()) != ("Linux", "x86_64")
|
||||
):
|
||||
print(
|
||||
"Skipping tests: GitHub Actions does not support the Android "
|
||||
"emulator on this platform."
|
||||
)
|
||||
else:
|
||||
with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir:
|
||||
print("::group::Tests")
|
||||
|
||||
# Prove the package is self-contained by using it to run the tests.
|
||||
shutil.unpack_archive(package_path, temp_dir)
|
||||
launcher_args = [
|
||||
"--managed", "maxVersion", "-v", f"--{context.ci_mode}-ci"
|
||||
]
|
||||
run(
|
||||
["./android.py", "test", *launcher_args],
|
||||
cwd=temp_dir
|
||||
)
|
||||
print("::endgroup::")
|
||||
|
||||
|
||||
def env(context):
|
||||
print_env(android_env(getattr(context, "host", None)))
|
||||
|
||||
|
||||
# Handle SIGTERM the same way as SIGINT. This ensures that if we're terminated
|
||||
# by the buildbot worker, we'll make an attempt to clean up our subprocesses.
|
||||
def install_signal_handler():
|
||||
def signal_handler(*args):
|
||||
os.kill(os.getpid(), signal.SIGINT)
|
||||
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser()
|
||||
subcommands = parser.add_subparsers(dest="subcommand")
|
||||
build = subcommands.add_parser("build", help="Build everything")
|
||||
configure_build = subcommands.add_parser("configure-build",
|
||||
help="Run `configure` for the "
|
||||
"build Python")
|
||||
make_build = subcommands.add_parser("make-build",
|
||||
help="Run `make` for the build Python")
|
||||
configure_host = subcommands.add_parser("configure-host",
|
||||
help="Run `configure` for Android")
|
||||
make_host = subcommands.add_parser("make-host",
|
||||
help="Run `make` for Android")
|
||||
clean = subcommands.add_parser("clean", help="Delete files and directories "
|
||||
"created by this script")
|
||||
for subcommand in build, configure_build, configure_host:
|
||||
subcommands = parser.add_subparsers(dest="subcommand", required=True)
|
||||
|
||||
def add_parser(*args, **kwargs):
|
||||
parser = subcommands.add_parser(*args, **kwargs)
|
||||
parser.add_argument(
|
||||
"-v", "--verbose", action="count", default=0,
|
||||
help="Show verbose output. Use twice to be even more verbose.")
|
||||
return parser
|
||||
|
||||
# Subcommands
|
||||
build = add_parser(
|
||||
"build", help="Run configure-build, make-build, configure-host and "
|
||||
"make-host")
|
||||
configure_build = add_parser(
|
||||
"configure-build", help="Run `configure` for the build Python")
|
||||
add_parser(
|
||||
"make-build", help="Run `make` for the build Python")
|
||||
configure_host = add_parser(
|
||||
"configure-host", help="Run `configure` for Android")
|
||||
make_host = add_parser(
|
||||
"make-host", help="Run `make` for Android")
|
||||
|
||||
add_parser("clean", help="Delete all build directories")
|
||||
add_parser("build-testbed", help="Build the testbed app")
|
||||
test = add_parser("test", help="Run the testbed app")
|
||||
package = add_parser("package", help="Make a release package")
|
||||
ci = add_parser("ci", help="Run build, package and test")
|
||||
env = add_parser("env", help="Print environment variables")
|
||||
|
||||
# Common arguments
|
||||
for subcommand in [build, configure_build, configure_host, ci]:
|
||||
subcommand.add_argument(
|
||||
"--clean", action="store_true", default=False, dest="clean",
|
||||
help="Delete any relevant directories before building")
|
||||
for subcommand in build, configure_host, make_host:
|
||||
help="Delete the relevant build directories first")
|
||||
|
||||
host_commands = [build, configure_host, make_host, package, ci]
|
||||
if in_source_tree:
|
||||
host_commands.append(env)
|
||||
for subcommand in host_commands:
|
||||
subcommand.add_argument(
|
||||
"host", metavar="HOST",
|
||||
choices=["aarch64-linux-android", "x86_64-linux-android"],
|
||||
"host", metavar="HOST", choices=HOSTS,
|
||||
help="Host triplet: choices=[%(choices)s]")
|
||||
for subcommand in build, configure_build, configure_host:
|
||||
|
||||
for subcommand in [build, configure_build, configure_host, ci]:
|
||||
subcommand.add_argument("args", nargs="*",
|
||||
help="Extra arguments to pass to `configure`")
|
||||
|
||||
context = parser.parse_args()
|
||||
dispatch = {"configure-build": configure_build_python,
|
||||
"make-build": make_build_python,
|
||||
"configure-host": configure_host_python,
|
||||
"make-host": make_host_python,
|
||||
"build": build_all,
|
||||
"clean": clean_all}
|
||||
dispatch[context.subcommand](context)
|
||||
# Test arguments
|
||||
device_group = test.add_mutually_exclusive_group(required=True)
|
||||
device_group.add_argument(
|
||||
"--connected", metavar="SERIAL", help="Run on a connected device. "
|
||||
"Connect it yourself, then get its serial from `adb devices`.")
|
||||
device_group.add_argument(
|
||||
"--managed", metavar="NAME", help="Run on a Gradle-managed device. "
|
||||
"These are defined in `managedDevices` in testbed/app/build.gradle.kts.")
|
||||
|
||||
test.add_argument(
|
||||
"--site-packages", metavar="DIR", type=abspath,
|
||||
help="Directory to copy as the app's site-packages.")
|
||||
test.add_argument(
|
||||
"--cwd", metavar="DIR", type=abspath,
|
||||
help="Directory to copy as the app's working directory.")
|
||||
test.add_argument(
|
||||
"args", nargs="*", help=f"Python command-line arguments. "
|
||||
f"Separate them from {SCRIPT_NAME}'s own arguments with `--`. "
|
||||
f"If neither -c nor -m are included, `-m test` will be prepended, "
|
||||
f"which will run Python's own test suite.")
|
||||
|
||||
# Package arguments.
|
||||
for subcommand in [package, ci]:
|
||||
subcommand.add_argument(
|
||||
"-g", action="store_true", default=False, dest="debug",
|
||||
help="Include debug information in package")
|
||||
|
||||
# CI arguments
|
||||
for subcommand in [test, ci]:
|
||||
group = subcommand.add_mutually_exclusive_group(required=subcommand is ci)
|
||||
group.add_argument(
|
||||
"--fast-ci", action="store_const", dest="ci_mode", const="fast",
|
||||
help="Add test arguments for GitHub Actions")
|
||||
group.add_argument(
|
||||
"--slow-ci", action="store_const", dest="ci_mode", const="slow",
|
||||
help="Add test arguments for buildbots")
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main():
|
||||
install_signal_handler()
|
||||
|
||||
# Under the buildbot, stdout is not a TTY, but we must still flush after
|
||||
# every line to make sure our output appears in the correct order relative
|
||||
# to the output of our subprocesses.
|
||||
for stream in [sys.stdout, sys.stderr]:
|
||||
stream.reconfigure(line_buffering=True)
|
||||
|
||||
context = parse_args()
|
||||
dispatch = {
|
||||
"configure-build": configure_build_python,
|
||||
"make-build": make_build_python,
|
||||
"configure-host": configure_host_python,
|
||||
"make-host": make_host_python,
|
||||
"build": build_all,
|
||||
"clean": clean_all,
|
||||
"build-testbed": build_testbed,
|
||||
"test": run_testbed,
|
||||
"package": package,
|
||||
"ci": ci,
|
||||
"env": env,
|
||||
}
|
||||
|
||||
try:
|
||||
result = dispatch[context.subcommand](context)
|
||||
if asyncio.iscoroutine(result):
|
||||
asyncio.run(result)
|
||||
except CalledProcessError as e:
|
||||
print_called_process_error(e)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def print_called_process_error(e):
|
||||
for stream_name in ["stdout", "stderr"]:
|
||||
content = getattr(e, stream_name)
|
||||
if isinstance(content, bytes):
|
||||
content = content.decode(*DECODE_ARGS)
|
||||
stream = getattr(sys, stream_name)
|
||||
if content:
|
||||
stream.write(content)
|
||||
if not content.endswith("\n"):
|
||||
stream.write("\n")
|
||||
|
||||
# shlex uses single quotes, so we surround the command with double quotes.
|
||||
print(
|
||||
f'Command "{join_command(e.cmd)}" returned exit status {e.returncode}'
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
Android/testbed/.gitignore (vendored, new file: 22 lines)
|
|
@ -0,0 +1,22 @@
|
|||
# The Gradle wrapper can be downloaded by running the `test` or `build-testbed`
|
||||
# commands of android.py.
|
||||
/gradlew
|
||||
/gradlew.bat
|
||||
/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
# The repository's top-level .gitignore file ignores all .idea directories, but
|
||||
# we want to keep any files which can't be regenerated from the Gradle
|
||||
# configuration.
|
||||
!.idea/
|
||||
/.idea/*
|
||||
!/.idea/inspectionProfiles
|
||||
|
||||
*.iml
|
||||
.gradle
|
||||
/local.properties
|
||||
.DS_Store
|
||||
/build
|
||||
/captures
|
||||
.externalNativeBuild
|
||||
.cxx
|
||||
local.properties
|
||||
Android/testbed/.idea/inspectionProfiles/Project_Default.xml (generated, new file: 8 lines)
|
|
@ -0,0 +1,8 @@
|
|||
<component name="InspectionProjectProfileManager">
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Project Default" />
|
||||
<inspection_tool class="AndroidLintGradleDependency" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
|
||||
<inspection_tool class="AndroidLintOldTargetApi" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
|
||||
<inspection_tool class="UnstableApiUsage" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
|
||||
</profile>
|
||||
</component>
|
||||
Android/testbed/app/.gitignore (vendored, new file: 1 line)
|
|
@ -0,0 +1 @@
|
|||
/build
|
||||
Android/testbed/app/build.gradle.kts (new file: 269 lines)
|
|
@ -0,0 +1,269 @@
|
|||
import com.android.build.api.variant.*
|
||||
import kotlin.math.max
|
||||
|
||||
plugins {
|
||||
id("com.android.application")
|
||||
id("org.jetbrains.kotlin.android")
|
||||
}
|
||||
|
||||
val ANDROID_DIR = file("../..")
|
||||
val PYTHON_DIR = ANDROID_DIR.parentFile!!
|
||||
val PYTHON_CROSS_DIR = file("$PYTHON_DIR/cross-build")
|
||||
val inSourceTree = (
|
||||
ANDROID_DIR.name == "Android" && file("$PYTHON_DIR/pyconfig.h.in").exists()
|
||||
)
|
||||
|
||||
val KNOWN_ABIS = mapOf(
|
||||
"aarch64-linux-android" to "arm64-v8a",
|
||||
"x86_64-linux-android" to "x86_64",
|
||||
)
|
||||
|
||||
// Discover prefixes.
|
||||
val prefixes = ArrayList<File>()
|
||||
if (inSourceTree) {
|
||||
for ((triplet, _) in KNOWN_ABIS.entries) {
|
||||
val prefix = file("$PYTHON_CROSS_DIR/$triplet/prefix")
|
||||
if (prefix.exists()) {
|
||||
prefixes.add(prefix)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Testbed is inside a release package.
|
||||
val prefix = file("$ANDROID_DIR/prefix")
|
||||
if (prefix.exists()) {
|
||||
prefixes.add(prefix)
|
||||
}
|
||||
}
|
||||
if (prefixes.isEmpty()) {
|
||||
throw GradleException(
|
||||
"No Android prefixes found: see README.md for testing instructions"
|
||||
)
|
||||
}
|
||||
|
||||
// Detect Python versions and ABIs.
|
||||
lateinit var pythonVersion: String
|
||||
var abis = HashMap<File, String>()
|
||||
for ((i, prefix) in prefixes.withIndex()) {
|
||||
val libDir = file("$prefix/lib")
|
||||
val version = run {
|
||||
for (filename in libDir.list()!!) {
|
||||
"""python(\d+\.\d+[a-z]*)""".toRegex().matchEntire(filename)?.let {
|
||||
return@run it.groupValues[1]
|
||||
}
|
||||
}
|
||||
throw GradleException("Failed to find Python version in $libDir")
|
||||
}
|
||||
if (i == 0) {
|
||||
pythonVersion = version
|
||||
} else if (pythonVersion != version) {
|
||||
throw GradleException(
|
||||
"${prefixes[0]} is Python $pythonVersion, but $prefix is Python $version"
|
||||
)
|
||||
}
|
||||
|
||||
val libPythonDir = file("$libDir/python$pythonVersion")
|
||||
val triplet = run {
|
||||
for (filename in libPythonDir.list()!!) {
|
||||
"""_sysconfigdata_[a-z]*_android_(.+).py""".toRegex()
|
||||
.matchEntire(filename)?.let {
|
||||
return@run it.groupValues[1]
|
||||
}
|
||||
}
|
||||
throw GradleException("Failed to find Python triplet in $libPythonDir")
|
||||
}
|
||||
abis[prefix] = KNOWN_ABIS[triplet]!!
|
||||
}
|
||||
|
||||
|
||||
android {
|
||||
val androidEnvFile = file("../../android-env.sh").absoluteFile
|
||||
|
||||
namespace = "org.python.testbed"
|
||||
compileSdk = 34
|
||||
|
||||
defaultConfig {
|
||||
applicationId = "org.python.testbed"
|
||||
|
||||
minSdk = androidEnvFile.useLines {
|
||||
for (line in it) {
|
||||
"""ANDROID_API_LEVEL:=(\d+)""".toRegex().find(line)?.let {
|
||||
return@useLines it.groupValues[1].toInt()
|
||||
}
|
||||
}
|
||||
throw GradleException("Failed to find API level in $androidEnvFile")
|
||||
}
|
||||
targetSdk = 34
|
||||
|
||||
versionCode = 1
|
||||
versionName = "1.0"
|
||||
|
||||
ndk.abiFilters.addAll(abis.values)
|
||||
externalNativeBuild.cmake.arguments(
|
||||
"-DPYTHON_PREFIX_DIR=" + if (inSourceTree) {
|
||||
// AGP uses the ${} syntax for its own purposes, so use a Jinja style
|
||||
// placeholder.
|
||||
"$PYTHON_CROSS_DIR/{{triplet}}/prefix"
|
||||
} else {
|
||||
prefixes[0]
|
||||
},
|
||||
"-DPYTHON_VERSION=$pythonVersion",
|
||||
"-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON",
|
||||
)
|
||||
|
||||
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
|
||||
}
|
||||
|
||||
ndkVersion = androidEnvFile.useLines {
|
||||
for (line in it) {
|
||||
"""ndk_version=(\S+)""".toRegex().find(line)?.let {
|
||||
return@useLines it.groupValues[1]
|
||||
}
|
||||
}
|
||||
throw GradleException("Failed to find NDK version in $androidEnvFile")
|
||||
}
|
||||
externalNativeBuild.cmake {
|
||||
path("src/main/c/CMakeLists.txt")
|
||||
}
|
||||
|
||||
// Set this property to something non-empty, otherwise it'll use the default
|
||||
// list, which ignores asset directories beginning with an underscore.
|
||||
aaptOptions.ignoreAssetsPattern = ".git"
|
||||
|
||||
compileOptions {
|
||||
sourceCompatibility = JavaVersion.VERSION_1_8
|
||||
targetCompatibility = JavaVersion.VERSION_1_8
|
||||
}
|
||||
kotlinOptions {
|
||||
jvmTarget = "1.8"
|
||||
}
|
||||
|
||||
testOptions {
|
||||
managedDevices {
|
||||
localDevices {
|
||||
create("minVersion") {
|
||||
device = "Small Phone"
|
||||
|
||||
// Managed devices have a minimum API level of 27.
|
||||
apiLevel = max(27, defaultConfig.minSdk!!)
|
||||
|
||||
// ATD devices are smaller and faster, but have a minimum
|
||||
// API level of 30.
|
||||
systemImageSource = if (apiLevel >= 30) "aosp-atd" else "aosp"
|
||||
}
|
||||
|
||||
create("maxVersion") {
|
||||
device = "Small Phone"
|
||||
apiLevel = defaultConfig.targetSdk!!
|
||||
systemImageSource = "aosp-atd"
|
||||
}
|
||||
}
|
||||
|
||||
// If the previous test run succeeded and nothing has changed,
|
||||
// Gradle thinks there's no need to run it again. Override that.
|
||||
afterEvaluate {
|
||||
(localDevices.names + listOf("connected")).forEach {
|
||||
tasks.named("${it}DebugAndroidTest") {
|
||||
outputs.upToDateWhen { false }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation("androidx.appcompat:appcompat:1.6.1")
|
||||
implementation("com.google.android.material:material:1.11.0")
|
||||
implementation("androidx.constraintlayout:constraintlayout:2.1.4")
|
||||
androidTestImplementation("androidx.test.ext:junit:1.1.5")
|
||||
androidTestImplementation("androidx.test:rules:1.5.0")
|
||||
}
|
||||
|
||||
|
||||
// Create some custom tasks to copy Python and its standard library from
|
||||
// elsewhere in the repository.
|
||||
androidComponents.onVariants { variant ->
|
||||
val pyPlusVer = "python$pythonVersion"
|
||||
generateTask(variant, variant.sources.assets!!) {
|
||||
into("python") {
|
||||
// Include files such as pyconfig.h are used by some of the tests.
|
||||
into("include/$pyPlusVer") {
|
||||
for (prefix in prefixes) {
|
||||
from("$prefix/include/$pyPlusVer")
|
||||
}
|
||||
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
|
||||
}
|
||||
|
||||
into("lib/$pyPlusVer") {
|
||||
// To aid debugging, the source directory takes priority when
|
||||
// running inside a CPython source tree.
|
||||
if (inSourceTree) {
|
||||
from("$PYTHON_DIR/Lib")
|
||||
}
|
||||
for (prefix in prefixes) {
|
||||
from("$prefix/lib/$pyPlusVer")
|
||||
}
|
||||
|
||||
into("site-packages") {
|
||||
from("$projectDir/src/main/python")
|
||||
|
||||
val sitePackages = findProperty("python.sitePackages") as String?
|
||||
if (!sitePackages.isNullOrEmpty()) {
|
||||
if (!file(sitePackages).exists()) {
|
||||
throw GradleException("$sitePackages does not exist")
|
||||
}
|
||||
from(sitePackages)
|
||||
}
|
||||
}
|
||||
|
||||
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
|
||||
exclude("**/__pycache__")
|
||||
}
|
||||
|
||||
into("cwd") {
|
||||
val cwd = findProperty("python.cwd") as String?
|
||||
if (!cwd.isNullOrEmpty()) {
|
||||
if (!file(cwd).exists()) {
|
||||
throw GradleException("$cwd does not exist")
|
||||
}
|
||||
from(cwd)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
generateTask(variant, variant.sources.jniLibs!!) {
|
||||
for ((prefix, abi) in abis.entries) {
|
||||
into(abi) {
|
||||
from("$prefix/lib")
|
||||
include("libpython*.*.so")
|
||||
include("lib*_python.so")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fun generateTask(
|
||||
variant: ApplicationVariant, directories: SourceDirectories,
|
||||
configure: GenerateTask.() -> Unit
|
||||
) {
|
||||
val taskName = "generate" +
|
||||
listOf(variant.name, "Python", directories.name)
|
||||
.map { it.replaceFirstChar(Char::uppercase) }
|
||||
.joinToString("")
|
||||
|
||||
directories.addGeneratedSourceDirectory(
|
||||
tasks.register<GenerateTask>(taskName) {
|
||||
into(outputDir)
|
||||
configure()
|
||||
},
|
||||
GenerateTask::outputDir)
|
||||
}
|
||||
|
||||
|
||||
// addGeneratedSourceDirectory requires the task to have a DirectoryProperty.
|
||||
abstract class GenerateTask: Sync() {
|
||||
@get:OutputDirectory
|
||||
abstract val outputDir: DirectoryProperty
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
package org.python.testbed
|
||||
|
||||
import androidx.test.annotation.UiThreadTest
|
||||
import androidx.test.platform.app.InstrumentationRegistry
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4
|
||||
|
||||
import org.junit.Test
|
||||
import org.junit.runner.RunWith
|
||||
|
||||
import org.junit.Assert.*
|
||||
|
||||
|
||||
@RunWith(AndroidJUnit4::class)
|
||||
class PythonSuite {
|
||||
@Test
|
||||
@UiThreadTest
|
||||
fun testPython() {
|
||||
val start = System.currentTimeMillis()
|
||||
try {
|
||||
val status = PythonTestRunner(
|
||||
InstrumentationRegistry.getInstrumentation().targetContext
|
||||
).run(
|
||||
InstrumentationRegistry.getArguments().getString("pythonArgs")!!,
|
||||
)
|
||||
assertEquals(0, status)
|
||||
} finally {
|
||||
// Make sure the process lives long enough for the test script to
|
||||
// detect it (see `find_pid` in android.py).
|
||||
val delay = 2000 - (System.currentTimeMillis() - start)
|
||||
if (delay > 0) {
|
||||
Thread.sleep(delay)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Android/testbed/app/src/main/AndroidManifest.xml (new file: 20 lines)
|
|
@ -0,0 +1,20 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
|
||||
<uses-permission android:name="android.permission.INTERNET"/>
|
||||
|
||||
<application
|
||||
android:icon="@drawable/ic_launcher"
|
||||
android:label="@string/app_name"
|
||||
android:theme="@style/Theme.Material3.Light.NoActionBar">
|
||||
<activity
|
||||
android:name=".MainActivity"
|
||||
android:exported="true">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN" />
|
||||
<category android:name="android.intent.category.LAUNCHER" />
|
||||
</intent-filter>
|
||||
</activity>
|
||||
</application>
|
||||
|
||||
</manifest>
|
||||
Android/testbed/app/src/main/c/CMakeLists.txt (new file: 14 lines)
|
|
@ -0,0 +1,14 @@
|
|||
cmake_minimum_required(VERSION 3.4.1)
|
||||
project(testbed)
|
||||
|
||||
# Resolve variables from the command line.
|
||||
string(
|
||||
REPLACE {{triplet}} ${CMAKE_LIBRARY_ARCHITECTURE}
|
||||
PYTHON_PREFIX_DIR ${PYTHON_PREFIX_DIR}
|
||||
)
|
||||
|
||||
include_directories(${PYTHON_PREFIX_DIR}/include/python${PYTHON_VERSION})
|
||||
link_directories(${PYTHON_PREFIX_DIR}/lib)
|
||||
link_libraries(log python${PYTHON_VERSION})
|
||||
|
||||
add_library(main_activity SHARED main_activity.c)
|
||||
Android/testbed/app/src/main/c/main_activity.c (new file: 202 lines)
|
|
@ -0,0 +1,202 @@
|
|||
#include <android/log.h>
|
||||
#include <errno.h>
|
||||
#include <jni.h>
|
||||
#include <pthread.h>
|
||||
#include <Python.h>
|
||||
#include <signal.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <unistd.h>
|
||||
|
||||
|
||||
static void throw_runtime_exception(JNIEnv *env, const char *message) {
|
||||
(*env)->ThrowNew(
|
||||
env,
|
||||
(*env)->FindClass(env, "java/lang/RuntimeException"),
|
||||
message);
|
||||
}
|
||||
|
||||
static void throw_errno(JNIEnv *env, const char *error_prefix) {
|
||||
char error_message[1024];
|
||||
snprintf(error_message, sizeof(error_message),
|
||||
"%s: %s", error_prefix, strerror(errno));
|
||||
throw_runtime_exception(env, error_message);
|
||||
}
|
||||
|
||||
|
||||
// --- Stdio redirection ------------------------------------------------------
|
||||
|
||||
// Most apps won't need this, because the Python-level sys.stdout and sys.stderr
|
||||
// are redirected to the Android logcat by Python itself. However, in the
|
||||
// testbed it's useful to redirect the native streams as well, to debug problems
|
||||
// in the Python startup or redirection process.
|
||||
//
|
||||
// Based on
|
||||
// https://github.com/beeware/briefcase-android-gradle-template/blob/v0.3.11/%7B%7B%20cookiecutter.safe_formal_name%20%7D%7D/app/src/main/cpp/native-lib.cpp
|
||||
|
||||
typedef struct {
|
||||
FILE *file;
|
||||
int fd;
|
||||
android_LogPriority priority;
|
||||
char *tag;
|
||||
int pipe[2];
|
||||
} StreamInfo;
|
||||
|
||||
// The FILE member can't be initialized here because stdout and stderr are not
|
||||
// compile-time constants. Instead, it's initialized immediately before the
|
||||
// redirection.
|
||||
static StreamInfo STREAMS[] = {
|
||||
{NULL, STDOUT_FILENO, ANDROID_LOG_INFO, "native.stdout", {-1, -1}},
|
||||
{NULL, STDERR_FILENO, ANDROID_LOG_WARN, "native.stderr", {-1, -1}},
|
||||
{NULL, -1, ANDROID_LOG_UNKNOWN, NULL, {-1, -1}},
|
||||
};
|
||||
|
||||
// The maximum length of a log message in bytes, including the level marker and
|
||||
// tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD in
|
||||
// platform/system/logging/liblog/include/log/log.h. As of API level 30, messages
|
||||
// longer than this will be be truncated by logcat. This limit has already been
|
||||
// reduced at least once in the history of Android (from 4076 to 4068 between API
|
||||
// level 23 and 26), so leave some headroom.
|
||||
static const int MAX_BYTES_PER_WRITE = 4000;
|
||||
|
||||
static void *redirection_thread(void *arg) {
|
||||
StreamInfo *si = (StreamInfo*)arg;
|
||||
ssize_t read_size;
|
||||
char buf[MAX_BYTES_PER_WRITE];
|
||||
while ((read_size = read(si->pipe[0], buf, sizeof buf - 1)) > 0) {
|
||||
buf[read_size] = '\0'; /* add null-terminator */
|
||||
__android_log_write(si->priority, si->tag, buf);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static char *redirect_stream(StreamInfo *si) {
|
||||
/* make the FILE unbuffered, to ensure messages are never lost */
|
||||
if (setvbuf(si->file, 0, _IONBF, 0)) {
|
||||
return "setvbuf";
|
||||
}
|
||||
|
||||
/* create the pipe and redirect the file descriptor */
|
||||
if (pipe(si->pipe)) {
|
||||
return "pipe";
|
||||
}
|
||||
if (dup2(si->pipe[1], si->fd) == -1) {
|
||||
return "dup2";
|
||||
}
|
||||
|
||||
/* start the logging thread */
|
||||
pthread_t thr;
|
||||
if ((errno = pthread_create(&thr, 0, redirection_thread, si))) {
|
||||
return "pthread_create";
|
||||
}
|
||||
if ((errno = pthread_detach(thr))) {
|
||||
return "pthread_detach";
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL Java_org_python_testbed_PythonTestRunner_redirectStdioToLogcat(
|
||||
JNIEnv *env, jobject obj
|
||||
) {
|
||||
STREAMS[0].file = stdout;
|
||||
STREAMS[1].file = stderr;
|
||||
for (StreamInfo *si = STREAMS; si->file; si++) {
|
||||
char *error_prefix;
|
||||
if ((error_prefix = redirect_stream(si))) {
|
||||
throw_errno(env, error_prefix);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// --- Python initialization ---------------------------------------------------
|
||||
|
||||
static char *init_signals() {
|
||||
// Some tests use SIGUSR1, but that's blocked by default in an Android app in
|
||||
// order to make it available to `sigwait` in the Signal Catcher thread.
|
||||
// (https://cs.android.com/android/platform/superproject/+/android14-qpr3-release:art/runtime/signal_catcher.cc).
|
||||
// That thread's functionality is only useful for debugging the JVM, so disabling
|
||||
// it should not weaken the tests.
|
||||
//
|
||||
// There's no safe way of stopping the thread completely (#123982), but simply
|
||||
// unblocking SIGUSR1 is enough to fix most tests.
|
||||
//
|
||||
// However, in tests that generate multiple different signals in quick
|
||||
// succession, it's possible for SIGUSR1 to arrive while the main thread is busy
|
||||
// running the C-level handler for a different signal. In that case, the SIGUSR1
|
||||
// may be sent to the Signal Catcher thread instead, which will generate a log
|
||||
// message containing the text "reacting to signal".
|
||||
//
|
||||
// Such tests may need to be changed in one of the following ways:
|
||||
// * Use a signal other than SIGUSR1 (e.g. test_stress_delivery_simultaneous in
|
||||
// test_signal.py).
|
||||
// * Send the signal to a specific thread rather than the whole process (e.g.
|
||||
// test_signals in test_threadsignals.py.
|
||||
sigset_t set;
|
||||
if (sigemptyset(&set)) {
|
||||
return "sigemptyset";
|
||||
}
|
||||
if (sigaddset(&set, SIGUSR1)) {
|
||||
return "sigaddset";
|
||||
}
|
||||
if ((errno = pthread_sigmask(SIG_UNBLOCK, &set, NULL))) {
|
||||
return "pthread_sigmask";
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static void throw_status(JNIEnv *env, PyStatus status) {
|
||||
throw_runtime_exception(env, status.err_msg ? status.err_msg : "");
|
||||
}
|
||||
|
||||
JNIEXPORT int JNICALL Java_org_python_testbed_PythonTestRunner_runPython(
|
||||
JNIEnv *env, jobject obj, jstring home, jarray args
|
||||
) {
|
||||
const char *home_utf8 = (*env)->GetStringUTFChars(env, home, NULL);
|
||||
char cwd[PATH_MAX];
|
||||
snprintf(cwd, sizeof(cwd), "%s/%s", home_utf8, "cwd");
|
||||
if (chdir(cwd)) {
|
||||
throw_errno(env, "chdir");
|
||||
return 1;
|
||||
}
|
||||
|
||||
char *error_prefix;
|
||||
if ((error_prefix = init_signals())) {
|
||||
throw_errno(env, error_prefix);
|
||||
return 1;
|
||||
}
|
||||
|
||||
PyConfig config;
|
||||
PyStatus status;
|
||||
PyConfig_InitPythonConfig(&config);
|
||||
|
||||
jsize argc = (*env)->GetArrayLength(env, args);
|
||||
const char *argv[argc + 1];
|
||||
for (int i = 0; i < argc; i++) {
|
||||
jobject arg = (*env)->GetObjectArrayElement(env, args, i);
|
||||
argv[i] = (*env)->GetStringUTFChars(env, arg, NULL);
|
||||
}
|
||||
argv[argc] = NULL;
|
||||
|
||||
// PyConfig_SetBytesArgv "must be called before other methods, since the
|
||||
// preinitialization configuration depends on command line arguments"
|
||||
if (PyStatus_Exception(status = PyConfig_SetBytesArgv(&config, argc, (char**)argv))) {
|
||||
throw_status(env, status);
|
||||
return 1;
|
||||
}
|
||||
|
||||
status = PyConfig_SetBytesString(&config, &config.home, home_utf8);
|
||||
if (PyStatus_Exception(status)) {
|
||||
throw_status(env, status);
|
||||
return 1;
|
||||
}
|
||||
|
||||
status = Py_InitializeFromConfig(&config);
|
||||
if (PyStatus_Exception(status)) {
|
||||
throw_status(env, status);
|
||||
return 1;
|
||||
}
|
||||
|
||||
return Py_RunMain();
|
||||
}
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
package org.python.testbed
|
||||
|
||||
import android.content.Context
|
||||
import android.os.*
|
||||
import android.system.Os
|
||||
import android.widget.TextView
|
||||
import androidx.appcompat.app.*
|
||||
import org.json.JSONArray
|
||||
import java.io.*
|
||||
|
||||
|
||||
// Launching the tests from an activity is OK for a quick check, but for
|
||||
// anything more complicated it'll be more convenient to use `android.py test`
|
||||
// to launch the tests via PythonSuite.
|
||||
class MainActivity : AppCompatActivity() {
|
||||
override fun onCreate(savedInstanceState: Bundle?) {
|
||||
super.onCreate(savedInstanceState)
|
||||
setContentView(R.layout.activity_main)
|
||||
val status = PythonTestRunner(this).run("""["-m", "test", "-W", "-uall"]""")
|
||||
findViewById<TextView>(R.id.tvHello).text = "Exit status $status"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class PythonTestRunner(val context: Context) {
|
||||
/** Run Python.
|
||||
*
|
||||
* @param args Python command-line, encoded as JSON.
|
||||
* @return The Python exit status: zero on success, nonzero on failure. */
|
||||
fun run(args: String) : Int {
|
||||
// We leave argument 0 as an empty string, which is a placeholder for the
|
||||
// executable name in embedded mode.
|
||||
val argsJsonArray = JSONArray(args)
|
||||
val argsStringArray = Array<String>(argsJsonArray.length() + 1) { it -> ""}
|
||||
for (i in 0..<argsJsonArray.length()) {
|
||||
argsStringArray[i + 1] = argsJsonArray.getString(i)
|
||||
}
|
||||
|
||||
// Python needs this variable to help it find the temporary directory,
|
||||
// but Android only sets it on API level 33 and later.
|
||||
Os.setenv("TMPDIR", context.cacheDir.toString(), false)
|
||||
|
||||
val pythonHome = extractAssets()
|
||||
System.loadLibrary("main_activity")
|
||||
redirectStdioToLogcat()
|
||||
return runPython(pythonHome.toString(), argsStringArray)
|
||||
}
|
||||
|
||||
private fun extractAssets() : File {
|
||||
val pythonHome = File(context.filesDir, "python")
|
||||
if (pythonHome.exists() && !pythonHome.deleteRecursively()) {
|
||||
throw RuntimeException("Failed to delete $pythonHome")
|
||||
}
|
||||
extractAssetDir("python", context.filesDir)
|
||||
|
||||
// Empty directories are lost in the asset packing/unpacking process.
|
||||
val cwd = File(pythonHome, "cwd")
|
||||
if (!cwd.exists()) {
|
||||
cwd.mkdir()
|
||||
}
|
||||
|
||||
return pythonHome
|
||||
}
|
||||
|
||||
private fun extractAssetDir(path: String, targetDir: File) {
|
||||
val names = context.assets.list(path)
|
||||
?: throw RuntimeException("Failed to list $path")
|
||||
val targetSubdir = File(targetDir, path)
|
||||
if (!targetSubdir.mkdirs()) {
|
||||
throw RuntimeException("Failed to create $targetSubdir")
|
||||
}
|
||||
|
||||
for (name in names) {
|
||||
val subPath = "$path/$name"
|
||||
val input: InputStream
|
||||
try {
|
||||
input = context.assets.open(subPath)
|
||||
} catch (e: FileNotFoundException) {
|
||||
extractAssetDir(subPath, targetDir)
|
||||
continue
|
||||
}
|
||||
input.use {
|
||||
File(targetSubdir, name).outputStream().use { output ->
|
||||
input.copyTo(output)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private external fun redirectStdioToLogcat()
|
||||
private external fun runPython(home: String, args: Array<String>) : Int
|
||||
}
|
||||
BIN
Android/testbed/app/src/main/res/drawable-xxhdpi/ic_launcher.png
Normal file
BIN
Android/testbed/app/src/main/res/drawable-xxhdpi/ic_launcher.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3 KiB |
19
Android/testbed/app/src/main/res/layout/activity_main.xml
Normal file
19
Android/testbed/app/src/main/res/layout/activity_main.xml
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
tools:context=".MainActivity">
|
||||
|
||||
<TextView
|
||||
android:id="@+id/tvHello"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="Hello World!"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
app:layout_constraintEnd_toEndOf="parent"
|
||||
app:layout_constraintStart_toStartOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent" />
|
||||
|
||||
</androidx.constraintlayout.widget.ConstraintLayout>
|
||||
3
Android/testbed/app/src/main/res/values/strings.xml
Normal file
3
Android/testbed/app/src/main/res/values/strings.xml
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
<resources>
|
||||
<string name="app_name">Python testbed</string>
|
||||
</resources>
|
||||
5
Android/testbed/build.gradle.kts
Normal file
5
Android/testbed/build.gradle.kts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
// Top-level build file where you can add configuration options common to all sub-projects/modules.
|
||||
plugins {
|
||||
id("com.android.application") version "8.10.0" apply false
|
||||
id("org.jetbrains.kotlin.android") version "1.9.22" apply false
|
||||
}
|
||||
28
Android/testbed/gradle.properties
Normal file
28
Android/testbed/gradle.properties
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
# Project-wide Gradle settings.
|
||||
# IDE (e.g. Android Studio) users:
|
||||
# Gradle settings configured through the IDE *will override*
|
||||
# any settings specified in this file.
|
||||
# For more details on how to configure your build environment visit
|
||||
# http://www.gradle.org/docs/current/userguide/build_environment.html
|
||||
# Specifies the JVM arguments used for the daemon process.
|
||||
# The setting is particularly useful for tweaking memory settings.
|
||||
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
|
||||
# When configured, Gradle will run in incubating parallel mode.
|
||||
# This option should only be used with decoupled projects. More details, visit
|
||||
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
|
||||
# org.gradle.parallel=true
|
||||
# AndroidX package structure to make it clearer which packages are bundled with the
|
||||
# Android operating system, and which are packaged with your app's APK
|
||||
# https://developer.android.com/topic/libraries/support-library/androidx-rn
|
||||
android.useAndroidX=true
|
||||
# Kotlin code style for this project: "official" or "obsolete":
|
||||
kotlin.code.style=official
|
||||
# Enables namespacing of each library's R class so that its R class includes only the
|
||||
# resources declared in the library itself and none from the library's dependencies,
|
||||
# thereby reducing the size of the R class for that library
|
||||
android.nonTransitiveRClass=true
|
||||
|
||||
# By default, the app will be uninstalled after the tests finish (apparently
|
||||
# after 10 seconds in case of an unclean shutdown). We disable this, because
|
||||
# when using android.py it can conflict with the installation of the next run.
|
||||
android.injected.androidTest.leaveApksInstalledAfterRun=true
|
||||
6
Android/testbed/gradle/wrapper/gradle-wrapper.properties
vendored
Normal file
6
Android/testbed/gradle/wrapper/gradle-wrapper.properties
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
#Mon Feb 19 20:29:06 GMT 2024
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
18
Android/testbed/settings.gradle.kts
Normal file
18
Android/testbed/settings.gradle.kts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
pluginManagement {
|
||||
repositories {
|
||||
google()
|
||||
mavenCentral()
|
||||
gradlePluginPortal()
|
||||
}
|
||||
}
|
||||
dependencyResolutionManagement {
|
||||
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
|
||||
repositories {
|
||||
google()
|
||||
mavenCentral()
|
||||
}
|
||||
}
|
||||
|
||||
rootProject.name = "Python testbed"
|
||||
include(":app")
|
||||
|
||||
1015
Apple/__main__.py
Normal file
1015
Apple/__main__.py
Normal file
File diff suppressed because it is too large
Load diff
328
Apple/iOS/README.md
Normal file
328
Apple/iOS/README.md
Normal file
|
|
@ -0,0 +1,328 @@
|
|||
# Python on iOS README
|
||||
|
||||
**iOS support is [tier 3](https://peps.python.org/pep-0011/#tier-3).**
|
||||
|
||||
This document provides a quick overview of some iOS specific features in the
|
||||
Python distribution.
|
||||
|
||||
These instructions are only needed if you're planning to compile Python for iOS
|
||||
yourself. Most users should *not* need to do this. If you're looking to
|
||||
experiment with writing an iOS app in Python, tools such as [BeeWare's
|
||||
Briefcase](https://briefcase.readthedocs.io) and [Kivy's
|
||||
Buildozer](https://buildozer.readthedocs.io) will provide a much more
|
||||
approachable user experience.
|
||||
|
||||
## Compilers for building on iOS
|
||||
|
||||
Building for iOS requires the use of Apple's Xcode tooling. It is strongly
|
||||
recommended that you use the most recent stable release of Xcode. This will
|
||||
require the use of the most (or second-most) recently released macOS version,
|
||||
as Apple does not maintain Xcode for older macOS versions. The Xcode Command
|
||||
Line Tools are not sufficient for iOS development; you need a *full* Xcode
|
||||
install.
|
||||
|
||||
If you want to run your code on the iOS simulator, you'll also need to install
|
||||
an iOS Simulator Platform. You should be prompted to select an iOS Simulator
|
||||
Platform when you first run Xcode. Alternatively, you can add an iOS Simulator
|
||||
Platform by selecting an open the Platforms tab of the Xcode Settings panel.
|
||||
|
||||
## Building Python on iOS
|
||||
|
||||
### ABIs and Architectures
|
||||
|
||||
iOS apps can be deployed on physical devices, and on the iOS simulator. Although
|
||||
the API used on these devices is identical, the ABI is different - you need to
|
||||
link against different libraries for an iOS device build (`iphoneos`) or an
|
||||
iOS simulator build (`iphonesimulator`).
|
||||
|
||||
Apple uses the `XCframework` format to allow specifying a single dependency
|
||||
that supports multiple ABIs. An `XCframework` is a wrapper around multiple
|
||||
ABI-specific frameworks that share a common API.
|
||||
|
||||
iOS can also support different CPU architectures within each ABI. At present,
|
||||
there is only a single supported architecture on physical devices - ARM64.
|
||||
However, the *simulator* supports 2 architectures - ARM64 (for running on Apple
|
||||
Silicon machines), and x86_64 (for running on older Intel-based machines).
|
||||
|
||||
To support multiple CPU architectures on a single platform, Apple uses a "fat
|
||||
binary" format - a single physical file that contains support for multiple
|
||||
architectures. It is possible to compile and use a "thin" single architecture
|
||||
version of a binary for testing purposes; however, the "thin" binary will not be
|
||||
portable to machines using other architectures.
|
||||
|
||||
### Building a multi-architecture iOS XCframework
|
||||
|
||||
The `Apple` subfolder of the Python repository acts as a build script that
|
||||
can be used to coordinate the compilation of a complete iOS XCframework. To use
|
||||
it, run::
|
||||
|
||||
python Apple build iOS
|
||||
|
||||
This will:
|
||||
|
||||
* Configure and compile a version of Python to run on the build machine
|
||||
* Download pre-compiled binary dependencies for each platform
|
||||
* Configure and build a `Python.framework` for each required architecture and
|
||||
iOS SDK
|
||||
* Merge the multiple `Python.framework` folders into a single `Python.xcframework`
|
||||
* Produce a `.tar.gz` archive in the `cross-build/dist` folder containing
|
||||
the `Python.xcframework`, plus a copy of the Testbed app pre-configured to
|
||||
use the XCframework.
|
||||
|
||||
The `Apple` build script has other entry points that will perform the
|
||||
individual parts of the overall `build` target, plus targets to test the
|
||||
build, clean the `cross-build` folder of iOS build products, and perform a
|
||||
complete "build and test" CI run. The `--clean` flag can also be used on
|
||||
individual commands to ensure that a stale build product are removed before
|
||||
building.
|
||||
|
||||
### Building a single-architecture framework
|
||||
|
||||
If you're using the `Apple` build script, you won't need to build
|
||||
individual frameworks. However, if you do need to manually configure an iOS
|
||||
Python build for a single framework, the following options are available.
|
||||
|
||||
#### iOS specific arguments to configure
|
||||
|
||||
* `--enable-framework[=DIR]`
|
||||
|
||||
This argument specifies the location where the Python.framework will be
|
||||
installed. If `DIR` is not specified, the framework will be installed into
|
||||
a subdirectory of the `iOS/Frameworks` folder.
|
||||
|
||||
This argument *must* be provided when configuring iOS builds. iOS does not
|
||||
support non-framework builds.
|
||||
|
||||
* `--with-framework-name=NAME`
|
||||
|
||||
Specify the name for the Python framework; defaults to `Python`.
|
||||
|
||||
> [!NOTE]
|
||||
> Unless you know what you're doing, changing the name of the Python
|
||||
> framework on iOS is not advised. If you use this option, you won't be able
|
||||
> to run the `Apple` build script without making significant manual
|
||||
> alterations, and you won't be able to use any binary packages unless you
|
||||
> compile them yourself using your own framework name.
|
||||
|
||||
#### Building Python for iOS
|
||||
|
||||
The Python build system will create a `Python.framework` that supports a
|
||||
*single* ABI with a *single* architecture. Unlike macOS, iOS does not allow a
|
||||
framework to contain non-library content, so the iOS build will produce a
|
||||
`bin` and `lib` folder in the same output folder as `Python.framework`.
|
||||
The `lib` folder will be needed at runtime to support the Python library.
|
||||
|
||||
If you want to use Python in a real iOS project, you need to produce multiple
|
||||
`Python.framework` builds, one for each ABI and architecture. iOS builds of
|
||||
Python *must* be constructed as framework builds. To support this, you must
|
||||
provide the `--enable-framework` flag when configuring the build. The build
|
||||
also requires the use of cross-compilation. The minimal commands for building
|
||||
Python for the ARM64 iOS simulator will look something like:
|
||||
```
|
||||
export PATH="$(pwd)/Apple/iOS/Resources/bin:/usr/bin:/bin:/usr/sbin:/sbin:/Library/Apple/usr/bin"
|
||||
./configure \
|
||||
--enable-framework \
|
||||
--host=arm64-apple-ios-simulator \
|
||||
--build=arm64-apple-darwin \
|
||||
--with-build-python=/path/to/python.exe
|
||||
make
|
||||
make install
|
||||
```
|
||||
|
||||
In this invocation:
|
||||
|
||||
* `Apple/iOS/Resources/bin` has been added to the path, providing some shims for the
|
||||
compilers and linkers needed by the build. Xcode requires the use of `xcrun`
|
||||
to invoke compiler tooling. However, if `xcrun` is pre-evaluated and the
|
||||
result passed to `configure`, these results can embed user- and
|
||||
version-specific paths into the sysconfig data, which limits the portability
|
||||
of the compiled Python. Alternatively, if `xcrun` is used *as* the compiler,
|
||||
it requires that compiler variables like `CC` include spaces, which can
|
||||
cause significant problems with many C configuration systems which assume that
|
||||
`CC` will be a single executable.
|
||||
|
||||
To work around this problem, the `Apple/iOS/Resources/bin` folder contains some
|
||||
wrapper scripts that present as simple compilers and linkers, but wrap
|
||||
underlying calls to `xcrun`. This allows configure to use a `CC`
|
||||
definition without spaces, and without user- or version-specific paths, while
|
||||
retaining the ability to adapt to the local Xcode install. These scripts are
|
||||
included in the `bin` directory of an iOS install.
|
||||
|
||||
These scripts will, by default, use the currently active Xcode installation.
|
||||
If you want to use a different Xcode installation, you can use
|
||||
`xcode-select` to set a new default Xcode globally, or you can use the
|
||||
`DEVELOPER_DIR` environment variable to specify an Xcode install. The
|
||||
scripts will use the default `iphoneos`/`iphonesimulator` SDK version for
|
||||
the select Xcode install; if you want to use a different SDK, you can set the
|
||||
`IOS_SDK_VERSION` environment variable. (e.g, setting
|
||||
`IOS_SDK_VERSION=17.1` would cause the scripts to use the `iphoneos17.1`
|
||||
and `iphonesimulator17.1` SDKs, regardless of the Xcode default.)
|
||||
|
||||
The path has also been cleared of any user customizations. A common source of
|
||||
bugs is for tools like Homebrew to accidentally leak macOS binaries into an iOS
|
||||
build. Resetting the path to a known "bare bones" value is the easiest way to
|
||||
avoid these problems.
|
||||
|
||||
* `--host` is the architecture and ABI that you want to build, in GNU compiler
|
||||
triple format. This will be one of:
|
||||
|
||||
- `arm64-apple-ios` for ARM64 iOS devices.
|
||||
- `arm64-apple-ios-simulator` for the iOS simulator running on Apple
|
||||
Silicon devices.
|
||||
- `x86_64-apple-ios-simulator` for the iOS simulator running on Intel
|
||||
devices.
|
||||
|
||||
* `--build` is the GNU compiler triple for the machine that will be running
|
||||
the compiler. This is one of:
|
||||
|
||||
- `arm64-apple-darwin` for Apple Silicon devices.
|
||||
- `x86_64-apple-darwin` for Intel devices.
|
||||
|
||||
* `/path/to/python.exe` is the path to a Python binary on the machine that
|
||||
will be running the compiler. This is needed because the Python compilation
|
||||
process involves running some Python code. On a normal desktop build of
|
||||
Python, you can compile a python interpreter and then use that interpreter to
|
||||
run Python code. However, the binaries produced for iOS won't run on macOS, so
|
||||
you need to provide an external Python interpreter. This interpreter must be
|
||||
the same version as the Python that is being compiled. To be completely safe,
|
||||
this should be the *exact* same commit hash. However, the longer a Python
|
||||
release has been stable, the more likely it is that this constraint can be
|
||||
relaxed - the same micro version will often be sufficient.
|
||||
|
||||
* The `install` target for iOS builds is slightly different to other
|
||||
platforms. On most platforms, `make install` will install the build into
|
||||
the final runtime location. This won't be the case for iOS, as the final
|
||||
runtime location will be on a physical device.
|
||||
|
||||
However, you still need to run the `install` target for iOS builds, as it
|
||||
performs some final framework assembly steps. The location specified with
|
||||
`--enable-framework` will be the location where `make install` will
|
||||
assemble the complete iOS framework. This completed framework can then
|
||||
be copied and relocated as required.
|
||||
|
||||
For a full CPython build, you also need to specify the paths to iOS builds of
|
||||
the binary libraries that CPython depends on (such as XZ, LibFFI and OpenSSL).
|
||||
This can be done by defining library specific environment variables (such as
|
||||
`LIBLZMA_CFLAGS`, `LIBLZMA_LIBS`), and the `--with-openssl` configure
|
||||
option. Versions of these libraries pre-compiled for iOS can be found in [this
|
||||
repository](https://github.com/beeware/cpython-apple-source-deps/releases).
|
||||
LibFFI is especially important, as many parts of the standard library
|
||||
(including the `platform`, `sysconfig` and `webbrowser` modules) require
|
||||
the use of the `ctypes` module at runtime.
|
||||
|
||||
By default, Python will be compiled with an iOS deployment target (i.e., the
|
||||
minimum supported iOS version) of 13.0. To specify a different deployment
|
||||
target, provide the version number as part of the `--host` argument - for
|
||||
example, `--host=arm64-apple-ios15.4-simulator` would compile an ARM64
|
||||
simulator build with a deployment target of 15.4.
|
||||
|
||||
## Testing Python on iOS
|
||||
|
||||
### Testing a multi-architecture framework
|
||||
|
||||
Once you have a built an XCframework, you can test that framework by running:
|
||||
|
||||
$ python Apple test iOS
|
||||
|
||||
### Testing a single-architecture framework
|
||||
|
||||
The `Apple/testbed` folder that contains an Xcode project that is able to run
|
||||
the Python test suite on Apple platforms. This project converts the Python test
|
||||
suite into a single test case in Xcode's XCTest framework. The single XCTest
|
||||
passes if the test suite passes.
|
||||
|
||||
To run the test suite, configure a Python build for an iOS simulator (i.e.,
|
||||
`--host=arm64-apple-ios-simulator` or `--host=x86_64-apple-ios-simulator`
|
||||
), specifying a framework build (i.e. `--enable-framework`). Ensure that your
|
||||
`PATH` has been configured to include the `Apple/iOS/Resources/bin` folder and
|
||||
exclude any non-iOS tools, then run:
|
||||
```
|
||||
make all
|
||||
make install
|
||||
make testios
|
||||
```
|
||||
|
||||
This will:
|
||||
|
||||
* Build an iOS framework for your chosen architecture;
|
||||
* Finalize the single-platform framework;
|
||||
* Make a clean copy of the testbed project;
|
||||
* Install the Python iOS framework into the copy of the testbed project; and
|
||||
* Run the test suite on an "entry-level device" simulator (i.e., an iPhone SE,
|
||||
iPhone 16e, or a similar).
|
||||
|
||||
On success, the test suite will exit and report successful completion of the
|
||||
test suite. On a 2022 M1 MacBook Pro, the test suite takes approximately 15
|
||||
minutes to run; a couple of extra minutes is required to compile the testbed
|
||||
project, and then boot and prepare the iOS simulator.
|
||||
|
||||
### Debugging test failures
|
||||
|
||||
Running `python Apple test iOS` generates a standalone version of the
|
||||
`Apple/testbed` project, and runs the full test suite. It does this using
|
||||
`Apple/testbed` itself - the folder is an executable module that can be used
|
||||
to create and run a clone of the testbed project. The standalone version of the
|
||||
testbed will be created in a directory named
|
||||
`cross-build/iOS-testbed.<timestamp>`.
|
||||
|
||||
You can generate your own standalone testbed instance by running:
|
||||
```
|
||||
python cross-build/iOS/testbed clone my-testbed
|
||||
```
|
||||
|
||||
In this invocation, `my-testbed` is the name of the folder for the new
|
||||
testbed clone.
|
||||
|
||||
If you've built your own XCframework, or you only want to test a single architecture,
|
||||
you can construct a standalone testbed instance by running:
|
||||
```
|
||||
python Apple/testbed clone --platform iOS --framework <path/to/framework> my-testbed
|
||||
```
|
||||
|
||||
The framework path can be the path path to a `Python.xcframework`, or the
|
||||
path to a folder that contains a single-platform `Python.framework`.
|
||||
|
||||
You can then use the `my-testbed` folder to run the Python test suite,
|
||||
passing in any command line arguments you may require. For example, if you're
|
||||
trying to diagnose a failure in the `os` module, you might run:
|
||||
```
|
||||
python my-testbed run -- test -W test_os
|
||||
```
|
||||
|
||||
This is the equivalent of running `python -m test -W test_os` on a desktop
|
||||
Python build. Any arguments after the `--` will be passed to testbed as if
|
||||
they were arguments to `python -m` on a desktop machine.
|
||||
|
||||
### Testing in Xcode
|
||||
|
||||
You can also open the testbed project in Xcode by running:
|
||||
```
|
||||
open my-testbed/iOSTestbed.xcodeproj
|
||||
```
|
||||
|
||||
This will allow you to use the full Xcode suite of tools for debugging.
|
||||
|
||||
The arguments used to run the test suite are defined as part of the test plan.
|
||||
To modify the test plan, select the test plan node of the project tree (it
|
||||
should be the first child of the root node), and select the "Configurations"
|
||||
tab. Modify the "Arguments Passed On Launch" value to change the testing
|
||||
arguments.
|
||||
|
||||
The test plan also disables parallel testing, and specifies the use of the
|
||||
`Testbed.lldbinit` file for providing configuration of the debugger. The
|
||||
default debugger configuration disables automatic breakpoints on the
|
||||
`SIGINT`, `SIGUSR1`, `SIGUSR2`, and `SIGXFSZ` signals.
|
||||
|
||||
### Testing on an iOS device
|
||||
|
||||
To test on an iOS device, the app needs to be signed with known developer
|
||||
credentials. To obtain these credentials, you must have an iOS Developer
|
||||
account, and your Xcode install will need to be logged into your account (see
|
||||
the Accounts tab of the Preferences dialog).
|
||||
|
||||
Once the project is open, and you're signed into your Apple Developer account,
|
||||
select the root node of the project tree (labeled "iOSTestbed"), then the
|
||||
"Signing & Capabilities" tab in the details page. Select a development team
|
||||
(this will likely be your own name), and plug in a physical device to your
|
||||
macOS machine with a USB cable. You should then be able to select your physical
|
||||
device from the list of targets in the pulldown in the Xcode titlebar.
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-ar
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-ar
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphoneos${IOS_SDK_VERSION} ar "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-clang
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-clang
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET} "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-clang++
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-clang++
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphoneos${IOS_SDK_VERSION} clang++ -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET} "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-cpp
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-cpp
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphoneos${IOS_SDK_VERSION} clang -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET} -E "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-ar
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-ar
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-clang
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-clang
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-clang++
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-cpp
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-cpp
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target arm64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator -E "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-strip
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-simulator-strip
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} strip -arch arm64 "$@"
|
||||
2
Apple/iOS/Resources/bin/arm64-apple-ios-strip
Executable file
2
Apple/iOS/Resources/bin/arm64-apple-ios-strip
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphoneos${IOS_SDK_VERSION} strip -arch arm64 "$@"
|
||||
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-ar
Executable file
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-ar
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} ar "$@"
|
||||
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-clang
Executable file
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-clang
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator "$@"
|
||||
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
Executable file
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-clang++
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang++ -target x86_64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator "$@"
|
||||
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp
Executable file
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-cpp
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} clang -target x86_64-apple-ios${IPHONEOS_DEPLOYMENT_TARGET}-simulator -E "$@"
|
||||
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-strip
Executable file
2
Apple/iOS/Resources/bin/x86_64-apple-ios-simulator-strip
Executable file
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/sh
|
||||
xcrun --sdk iphonesimulator${IOS_SDK_VERSION} strip -arch x86_64 "$@"
|
||||
|
|
@ -19,7 +19,7 @@
|
|||
<string>iPhoneOS</string>
|
||||
</array>
|
||||
<key>MinimumOSVersion</key>
|
||||
<string>12.0</string>
|
||||
<string>13.0</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>1</string>
|
||||
</dict>
|
||||
150
Apple/testbed/Python.xcframework/build/utils.sh
Executable file
150
Apple/testbed/Python.xcframework/build/utils.sh
Executable file
|
|
@ -0,0 +1,150 @@
|
|||
# Utility methods for use in an Xcode project.
|
||||
#
|
||||
# An iOS XCframework cannot include any content other than the library binary
|
||||
# and relevant metadata. However, Python requires a standard library at runtime.
|
||||
# Therefore, it is necessary to add a build step to an Xcode app target that
|
||||
# processes the standard library and puts the content into the final app.
|
||||
#
|
||||
# In general, these tools will be invoked after bundle resources have been
|
||||
# copied into the app, but before framework embedding (and signing).
|
||||
#
|
||||
# The following is an example script, assuming that:
|
||||
# * Python.xcframework is in the root of the project
|
||||
# * There is an `app` folder that contains the app code
|
||||
# * There is an `app_packages` folder that contains installed Python packages.
|
||||
# -----
|
||||
# set -e
|
||||
# source $PROJECT_DIR/Python.xcframework/build/build_utils.sh
|
||||
# install_python Python.xcframework app app_packages
|
||||
# -----
|
||||
|
||||
# Copy the standard library from the XCframework into the app bundle.
|
||||
#
|
||||
# Accepts one argument:
|
||||
# 1. The path, relative to the root of the Xcode project, where the Python
|
||||
# XCframework can be found.
|
||||
install_stdlib() {
|
||||
PYTHON_XCFRAMEWORK_PATH=$1
|
||||
|
||||
mkdir -p "$CODESIGNING_FOLDER_PATH/python/lib"
|
||||
if [ "$EFFECTIVE_PLATFORM_NAME" = "-iphonesimulator" ]; then
|
||||
echo "Installing Python modules for iOS Simulator"
|
||||
if [ -d "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/ios-arm64-simulator" ]; then
|
||||
SLICE_FOLDER="ios-arm64-simulator"
|
||||
else
|
||||
SLICE_FOLDER="ios-arm64_x86_64-simulator"
|
||||
fi
|
||||
else
|
||||
echo "Installing Python modules for iOS Device"
|
||||
SLICE_FOLDER="ios-arm64"
|
||||
fi
|
||||
|
||||
# If the XCframework has a shared lib folder, then it's a full framework.
|
||||
# Copy both the common and slice-specific part of the lib directory.
|
||||
# Otherwise, it's a single-arch framework; use the "full" lib folder.
|
||||
if [ -d "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/lib" ]; then
|
||||
rsync -au --delete "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/lib/" "$CODESIGNING_FOLDER_PATH/python/lib/"
|
||||
rsync -au "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/$SLICE_FOLDER/lib-$ARCHS/" "$CODESIGNING_FOLDER_PATH/python/lib/"
|
||||
else
|
||||
rsync -au --delete "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/$SLICE_FOLDER/lib/" "$CODESIGNING_FOLDER_PATH/python/lib/"
|
||||
fi
|
||||
}
|
||||
|
||||
# Convert a single .so library into a framework that iOS can load.
|
||||
#
|
||||
# Accepts three arguments:
|
||||
# 1. The path, relative to the root of the Xcode project, where the Python
|
||||
# XCframework can be found.
|
||||
# 2. The base path, relative to the installed location in the app bundle, that
|
||||
# needs to be processed. Any .so file found in this path (or a subdirectory
|
||||
# of it) will be processed.
|
||||
# 2. The full path to a single .so file to process. This path should include
|
||||
# the base path.
|
||||
install_dylib () {
|
||||
PYTHON_XCFRAMEWORK_PATH=$1
|
||||
INSTALL_BASE=$2
|
||||
FULL_EXT=$3
|
||||
|
||||
# The name of the extension file
|
||||
EXT=$(basename "$FULL_EXT")
|
||||
# The name and location of the module
|
||||
MODULE_PATH=$(dirname "$FULL_EXT")
|
||||
MODULE_NAME=$(echo $EXT | cut -d "." -f 1)
|
||||
# The location of the extension file, relative to the bundle
|
||||
RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/}
|
||||
# The path to the extension file, relative to the install base
|
||||
PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}
|
||||
# The full dotted name of the extension module, constructed from the file path.
|
||||
FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d "." -f 1 | tr "/" ".");
|
||||
# A bundle identifier; not actually used, but required by Xcode framework packaging
|
||||
FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr "_" "-")
|
||||
# The name of the framework folder.
|
||||
FRAMEWORK_FOLDER="Frameworks/$FULL_MODULE_NAME.framework"
|
||||
|
||||
# If the framework folder doesn't exist, create it.
|
||||
if [ ! -d "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER" ]; then
|
||||
echo "Creating framework for $RELATIVE_EXT"
|
||||
mkdir -p "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER"
|
||||
cp "$PROJECT_DIR/$PYTHON_XCFRAMEWORK_PATH/build/$PLATFORM_FAMILY_NAME-dylib-Info-template.plist" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
|
||||
plutil -replace CFBundleExecutable -string "$FULL_MODULE_NAME" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
|
||||
plutil -replace CFBundleIdentifier -string "$FRAMEWORK_BUNDLE_ID" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
|
||||
fi
|
||||
|
||||
echo "Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME"
|
||||
mv "$FULL_EXT" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME"
|
||||
# Create a placeholder .fwork file where the .so was
|
||||
echo "$FRAMEWORK_FOLDER/$FULL_MODULE_NAME" > ${FULL_EXT%.so}.fwork
|
||||
# Create a back reference to the .so file location in the framework
|
||||
echo "${RELATIVE_EXT%.so}.fwork" > "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin"
|
||||
|
||||
# If the framework provides an xcprivacy file, install it.
|
||||
if [ -e "$MODULE_PATH/$MODULE_NAME.xcprivacy" ]; then
|
||||
echo "Installing XCPrivacy file for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME"
|
||||
XCPRIVACY_FILE="$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/PrivacyInfo.xcprivacy"
|
||||
if [ -e "$XCPRIVACY_FILE" ]; then
|
||||
rm -rf "$XCPRIVACY_FILE"
|
||||
fi
|
||||
mv "$MODULE_PATH/$MODULE_NAME.xcprivacy" "$XCPRIVACY_FILE"
|
||||
fi
|
||||
|
||||
echo "Signing framework as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)..."
|
||||
/usr/bin/codesign --force --sign "$EXPANDED_CODE_SIGN_IDENTITY" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER"
|
||||
}
|
||||
|
||||
# Process all the dynamic libraries in a path into Framework format.
|
||||
#
|
||||
# Accepts two arguments:
|
||||
# 1. The path, relative to the root of the Xcode project, where the Python
|
||||
# XCframework can be found.
|
||||
# 2. The base path, relative to the installed location in the app bundle, that
|
||||
# needs to be processed. Any .so file found in this path (or a subdirectory
|
||||
# of it) will be processed.
|
||||
process_dylibs () {
|
||||
PYTHON_XCFRAMEWORK_PATH=$1
|
||||
LIB_PATH=$2
|
||||
find "$CODESIGNING_FOLDER_PATH/$LIB_PATH" -name "*.so" | while read FULL_EXT; do
|
||||
install_dylib $PYTHON_XCFRAMEWORK_PATH "$LIB_PATH/" "$FULL_EXT"
|
||||
done
|
||||
}
|
||||
|
||||
# The entry point for post-processing a Python XCframework.
|
||||
#
|
||||
# Accepts 1 or more arguments:
|
||||
# 1. The path, relative to the root of the Xcode project, where the Python
|
||||
# XCframework can be found. If the XCframework is in the root of the project,
|
||||
# 2+. The path of a package, relative to the root of the packaged app, that contains
|
||||
# library content that should be processed for binary libraries.
|
||||
install_python() {
|
||||
PYTHON_XCFRAMEWORK_PATH=$1
|
||||
shift
|
||||
|
||||
install_stdlib $PYTHON_XCFRAMEWORK_PATH
|
||||
PYTHON_VER=$(ls -1 "$CODESIGNING_FOLDER_PATH/python/lib")
|
||||
echo "Install Python $PYTHON_VER standard library extension modules..."
|
||||
process_dylibs $PYTHON_XCFRAMEWORK_PATH python/lib/$PYTHON_VER/lib-dynload
|
||||
|
||||
for package_path in $@; do
|
||||
echo "Installing $package_path extension modules ..."
|
||||
process_dylibs $PYTHON_XCFRAMEWORK_PATH $package_path
|
||||
done
|
||||
}
|
||||
4
Apple/testbed/Testbed.lldbinit
Normal file
4
Apple/testbed/Testbed.lldbinit
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
process handle SIGINT -n true -p true -s false
|
||||
process handle SIGUSR1 -n true -p true -s false
|
||||
process handle SIGUSR2 -n true -p true -s false
|
||||
process handle SIGXFSZ -n true -p true -s false
|
||||
197
Apple/testbed/TestbedTests/TestbedTests.m
Normal file
197
Apple/testbed/TestbedTests/TestbedTests.m
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
#import <XCTest/XCTest.h>
|
||||
#import <Python/Python.h>
|
||||
|
||||
@interface TestbedTests : XCTestCase
|
||||
|
||||
@end
|
||||
|
||||
@implementation TestbedTests
|
||||
|
||||
|
||||
- (void)testPython {
|
||||
const char **argv;
|
||||
int exit_code;
|
||||
int failed;
|
||||
PyStatus status;
|
||||
PyPreConfig preconfig;
|
||||
PyConfig config;
|
||||
PyObject *app_packages_path;
|
||||
PyObject *method_args;
|
||||
PyObject *result;
|
||||
PyObject *site_module;
|
||||
PyObject *site_addsitedir_attr;
|
||||
PyObject *sys_module;
|
||||
PyObject *sys_path_attr;
|
||||
NSArray *test_args;
|
||||
NSString *python_home;
|
||||
NSString *path;
|
||||
wchar_t *wtmp_str;
|
||||
|
||||
NSString *resourcePath = [[NSBundle mainBundle] resourcePath];
|
||||
|
||||
// Set some other common environment indicators to disable color, as the
|
||||
// Xcode log can't display color. Stdout will report that it is *not* a
|
||||
// TTY.
|
||||
setenv("NO_COLOR", "1", true);
|
||||
setenv("PYTHON_COLORS", "0", true);
|
||||
|
||||
// Arguments to pass into the test suite runner.
|
||||
// argv[0] must identify the process; any subsequent arg
|
||||
// will be handled as if it were an argument to `python -m test`
|
||||
// The processInfo arguments contain the binary that is running,
|
||||
// followed by the arguments defined in the test plan. This means:
|
||||
// run_module = test_args[1]
|
||||
// argv = ["Testbed"] + test_args[2:]
|
||||
test_args = [[NSProcessInfo processInfo] arguments];
|
||||
if (test_args == NULL) {
|
||||
NSLog(@"Unable to identify test arguments.");
|
||||
}
|
||||
NSLog(@"Test arguments: %@", test_args);
|
||||
argv = malloc(sizeof(char *) * ([test_args count] - 1));
|
||||
argv[0] = "Testbed";
|
||||
for (int i = 1; i < [test_args count] - 1; i++) {
|
||||
argv[i] = [[test_args objectAtIndex:i+1] UTF8String];
|
||||
}
|
||||
|
||||
// Generate an isolated Python configuration.
|
||||
NSLog(@"Configuring isolated Python...");
|
||||
PyPreConfig_InitIsolatedConfig(&preconfig);
|
||||
PyConfig_InitIsolatedConfig(&config);
|
||||
|
||||
// Configure the Python interpreter:
|
||||
// Enforce UTF-8 encoding for stderr, stdout, file-system encoding and locale.
|
||||
// See https://docs.python.org/3/library/os.html#python-utf-8-mode.
|
||||
preconfig.utf8_mode = 1;
|
||||
// Use the system logger for stdout/err
|
||||
config.use_system_logger = 1;
|
||||
// Don't buffer stdio. We want output to appears in the log immediately
|
||||
config.buffered_stdio = 0;
|
||||
// Don't write bytecode; we can't modify the app bundle
|
||||
// after it has been signed.
|
||||
config.write_bytecode = 0;
|
||||
// Ensure that signal handlers are installed
|
||||
config.install_signal_handlers = 1;
|
||||
// Run the test module.
|
||||
config.run_module = Py_DecodeLocale([[test_args objectAtIndex:1] UTF8String], NULL);
|
||||
// For debugging - enable verbose mode.
|
||||
// config.verbose = 1;
|
||||
|
||||
NSLog(@"Pre-initializing Python runtime...");
|
||||
status = Py_PreInitialize(&preconfig);
|
||||
if (PyStatus_Exception(status)) {
|
||||
XCTFail(@"Unable to pre-initialize Python interpreter: %s", status.err_msg);
|
||||
PyConfig_Clear(&config);
|
||||
return;
|
||||
}
|
||||
|
||||
// Set the home for the Python interpreter
|
||||
python_home = [NSString stringWithFormat:@"%@/python", resourcePath, nil];
|
||||
NSLog(@"PythonHome: %@", python_home);
|
||||
wtmp_str = Py_DecodeLocale([python_home UTF8String], NULL);
|
||||
status = PyConfig_SetString(&config, &config.home, wtmp_str);
|
||||
if (PyStatus_Exception(status)) {
|
||||
XCTFail(@"Unable to set PYTHONHOME: %s", status.err_msg);
|
||||
PyConfig_Clear(&config);
|
||||
return;
|
||||
}
|
||||
PyMem_RawFree(wtmp_str);
|
||||
|
||||
// Read the site config
|
||||
status = PyConfig_Read(&config);
|
||||
if (PyStatus_Exception(status)) {
|
||||
XCTFail(@"Unable to read site config: %s", status.err_msg);
|
||||
PyConfig_Clear(&config);
|
||||
return;
|
||||
}
|
||||
|
||||
NSLog(@"Configure argc/argv...");
|
||||
status = PyConfig_SetBytesArgv(&config, [test_args count] - 1, (char**) argv);
|
||||
if (PyStatus_Exception(status)) {
|
||||
XCTFail(@"Unable to configure argc/argv: %s", status.err_msg);
|
||||
PyConfig_Clear(&config);
|
||||
return;
|
||||
}
|
||||
|
||||
NSLog(@"Initializing Python runtime...");
|
||||
status = Py_InitializeFromConfig(&config);
|
||||
if (PyStatus_Exception(status)) {
|
||||
XCTFail(@"Unable to initialize Python interpreter: %s", status.err_msg);
|
||||
PyConfig_Clear(&config);
|
||||
return;
|
||||
}
|
||||
|
||||
// Add app_packages as a site directory. This both adds to sys.path,
|
||||
// and ensures that any .pth files in that directory will be executed.
|
||||
site_module = PyImport_ImportModule("site");
|
||||
if (site_module == NULL) {
|
||||
XCTFail(@"Could not import site module");
|
||||
return;
|
||||
}
|
||||
|
||||
site_addsitedir_attr = PyObject_GetAttrString(site_module, "addsitedir");
|
||||
if (site_addsitedir_attr == NULL || !PyCallable_Check(site_addsitedir_attr)) {
|
||||
XCTFail(@"Could not access site.addsitedir");
|
||||
return;
|
||||
}
|
||||
|
||||
path = [NSString stringWithFormat:@"%@/app_packages", resourcePath, nil];
|
||||
NSLog(@"App packages path: %@", path);
|
||||
wtmp_str = Py_DecodeLocale([path UTF8String], NULL);
|
||||
app_packages_path = PyUnicode_FromWideChar(wtmp_str, wcslen(wtmp_str));
|
||||
if (app_packages_path == NULL) {
|
||||
XCTFail(@"Could not convert app_packages path to unicode");
|
||||
return;
|
||||
}
|
||||
PyMem_RawFree(wtmp_str);
|
||||
|
||||
method_args = Py_BuildValue("(O)", app_packages_path);
|
||||
if (method_args == NULL) {
|
||||
XCTFail(@"Could not create arguments for site.addsitedir");
|
||||
return;
|
||||
}
|
||||
|
||||
result = PyObject_CallObject(site_addsitedir_attr, method_args);
|
||||
if (result == NULL) {
|
||||
XCTFail(@"Could not add app_packages directory using site.addsitedir");
|
||||
return;
|
||||
}
|
||||
|
||||
// Add test code to sys.path
|
||||
sys_module = PyImport_ImportModule("sys");
|
||||
if (sys_module == NULL) {
|
||||
XCTFail(@"Could not import sys module");
|
||||
return;
|
||||
}
|
||||
|
||||
sys_path_attr = PyObject_GetAttrString(sys_module, "path");
|
||||
if (sys_path_attr == NULL) {
|
||||
XCTFail(@"Could not access sys.path");
|
||||
return;
|
||||
}
|
||||
|
||||
path = [NSString stringWithFormat:@"%@/app", resourcePath, nil];
|
||||
NSLog(@"App path: %@", path);
|
||||
wtmp_str = Py_DecodeLocale([path UTF8String], NULL);
|
||||
failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String]));
|
||||
if (failed) {
|
||||
XCTFail(@"Unable to add app to sys.path");
|
||||
return;
|
||||
}
|
||||
PyMem_RawFree(wtmp_str);
|
||||
|
||||
// Ensure the working directory is the app folder.
|
||||
chdir([path UTF8String]);
|
||||
|
||||
// Start the test suite. Print a separator to differentiate Python startup logs from app logs
|
||||
NSLog(@"---------------------------------------------------------------------------");
|
||||
|
||||
exit_code = Py_RunMain();
|
||||
XCTAssertEqual(exit_code, 0, @"Test suite did not pass");
|
||||
|
||||
NSLog(@"---------------------------------------------------------------------------");
|
||||
|
||||
Py_Finalize();
|
||||
}
|
||||
|
||||
|
||||
@end
|
||||
415
Apple/testbed/__main__.py
Normal file
415
Apple/testbed/__main__.py
Normal file
|
|
@ -0,0 +1,415 @@
|
|||
import argparse
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
TEST_SLICES = {
|
||||
"iOS": "ios-arm64_x86_64-simulator",
|
||||
}
|
||||
|
||||
DECODE_ARGS = ("UTF-8", "backslashreplace")
|
||||
|
||||
# The system log prefixes each line:
|
||||
# 2025-01-17 16:14:29.093742+0800 iOSTestbed[23987:1fd393b4] ...
|
||||
# 2025-01-17 16:14:29.093742+0800 iOSTestbed[23987:1fd393b4] ...

LOG_PREFIX_REGEX = re.compile(
    r"^\d{4}-\d{2}-\d{2}"  # YYYY-MM-DD
    r"\s+\d+:\d{2}:\d{2}\.\d+\+\d{4}"  # HH:MM:SS.ssssss+ZZZZ
    r"\s+iOSTestbed\[\d+:\w+\]"  # Process/thread ID
)


# Select a simulator device to use.
def select_simulator_device(platform):
    # List the testing simulators, in JSON format
    raw_json = subprocess.check_output(["xcrun", "simctl", "list", "-j"])
    json_data = json.loads(raw_json)

    if platform == "iOS":
        # Any iOS device will do; we'll look for "SE" devices - but the name isn't
        # consistent over time. Older Xcode versions will use "iPhone SE (Nth
        # generation)"; as of 2025, they've started using "iPhone 16e".
        #
        # When Xcode is updated after a new release, new devices will be available
        # and old ones will be dropped from the set available on the latest iOS
        # version. Select the one with the highest minimum runtime version - this
        # is an indicator of the "newest" released device, which should always be
        # supported on the "most recent" iOS version.
        se_simulators = sorted(
            (devicetype["minRuntimeVersion"], devicetype["name"])
            for devicetype in json_data["devicetypes"]
            if devicetype["productFamily"] == "iPhone"
            and (
                (
                    "iPhone " in devicetype["name"]
                    and devicetype["name"].endswith("e")
                )
                or "iPhone SE " in devicetype["name"]
            )
        )
        simulator = se_simulators[-1][1]
    else:
        raise ValueError(f"Unknown platform {platform}")

    return simulator


def xcode_test(location: Path, platform: str, simulator: str, verbose: bool):
    # Build and run the test suite on the named simulator.
    args = [
        "-project",
        str(location / f"{platform}Testbed.xcodeproj"),
        "-scheme",
        f"{platform}Testbed",
        "-destination",
        f"platform={platform} Simulator,name={simulator}",
        "-derivedDataPath",
        str(location / "DerivedData"),
    ]
    verbosity_args = [] if verbose else ["-quiet"]

    print("Building test project...")
    subprocess.run(
        ["xcodebuild", "build-for-testing"] + args + verbosity_args,
        check=True,
    )

    print("Running test project...")
    # Test execution *can't* be run -quiet; verbose mode
    # is how we see the output of the test suite.
    process = subprocess.Popen(
        ["xcodebuild", "test-without-building"] + args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    while line := (process.stdout.readline()).decode(*DECODE_ARGS):
        # Strip the timestamp/process prefix from each log line
        line = LOG_PREFIX_REGEX.sub("", line)
        sys.stdout.write(line)
        sys.stdout.flush()

    status = process.wait(timeout=5)
    exit(status)


def copy(src, tgt):
    """An all-purpose copy.

    If src is a file, it is copied. If src is a symlink, it is copied *as a
    symlink*. If src is a directory, the full tree is duplicated, with symlinks
    being preserved.
    """
    if src.is_file() or src.is_symlink():
        shutil.copyfile(src, tgt, follow_symlinks=False)
    else:
        shutil.copytree(src, tgt, symlinks=True)


def clone_testbed(
    source: Path,
    target: Path,
    framework: Path,
    platform: str,
    apps: list[Path],
) -> None:
    if target.exists():
        print(f"{target} already exists; aborting without creating project.")
        sys.exit(10)

    if framework is None:
        if not (
            source / "Python.xcframework" / TEST_SLICES[platform] / "bin"
        ).is_dir():
            print(
                f"The testbed being cloned ({source}) does not contain "
                "a framework with slices. Re-run with --framework"
            )
            sys.exit(11)
    else:
        if not framework.is_dir():
            print(f"{framework} does not exist.")
            sys.exit(12)
        elif not (
            framework.suffix == ".xcframework"
            or (framework / "Python.framework").is_dir()
        ):
            print(
                f"{framework} is not an XCframework, "
                f"or a simulator slice of a framework build."
            )
            sys.exit(13)

    print("Cloning testbed project:")
    print(f" Cloning {source}...", end="")
    # Only copy the files for the platform being cloned plus the files common
    # to all platforms. The XCframework will be copied later, if needed.
    target.mkdir(parents=True)

    for name in [
        "__main__.py",
        "TestbedTests",
        "Testbed.lldbinit",
        f"{platform}Testbed",
        f"{platform}Testbed.xcodeproj",
        f"{platform}Testbed.xctestplan",
    ]:
        copy(source / name, target / name)

    print(" done")

    orig_xc_framework_path = source / "Python.xcframework"
    xc_framework_path = target / "Python.xcframework"
    test_framework_path = xc_framework_path / TEST_SLICES[platform]
    if framework is not None:
        if framework.suffix == ".xcframework":
            print(" Installing XCFramework...", end="")
            xc_framework_path.symlink_to(
                framework.relative_to(xc_framework_path.parent, walk_up=True)
            )
            print(" done")
        else:
            print(" Installing simulator framework...", end="")
            # We're only installing a slice of a framework; we need
            # to do a full tree copy to make sure we don't damage
            # symlinked content.
            shutil.copytree(orig_xc_framework_path, xc_framework_path)
            if test_framework_path.is_dir():
                shutil.rmtree(test_framework_path)
            else:
                test_framework_path.unlink(missing_ok=True)
            test_framework_path.symlink_to(
                framework.relative_to(test_framework_path.parent, walk_up=True)
            )
            print(" done")
    else:
        copy(orig_xc_framework_path, xc_framework_path)

        if (
            xc_framework_path.is_symlink()
            and not xc_framework_path.readlink().is_absolute()
        ):
            # XCFramework is a relative symlink. Rewrite the symlink relative
            # to the new location.
            print(" Rewriting symlink to XCframework...", end="")
            resolved_xc_framework_path = (
                source / xc_framework_path.readlink()
            ).resolve()
            xc_framework_path.unlink()
            xc_framework_path.symlink_to(
                resolved_xc_framework_path.relative_to(
                    xc_framework_path.parent, walk_up=True
                )
            )
            print(" done")
        elif (
            test_framework_path.is_symlink()
            and not test_framework_path.readlink().is_absolute()
        ):
            print(" Rewriting symlink to simulator framework...", end="")
            # Simulator framework is a relative symlink. Rewrite the symlink
            # relative to the new location.
            orig_test_framework_path = (
                source / "Python.XCframework" / test_framework_path.readlink()
            ).resolve()
            test_framework_path.unlink()
            test_framework_path.symlink_to(
                orig_test_framework_path.relative_to(
                    test_framework_path.parent, walk_up=True
                )
            )
            print(" done")
        else:
            print(" Using pre-existing Python framework.")

    for app_src in apps:
        print(f" Installing app {app_src.name!r}...", end="")
        app_target = target / f"Testbed/app/{app_src.name}"
        if app_target.is_dir():
            shutil.rmtree(app_target)
        shutil.copytree(app_src, app_target)
        print(" done")

    print(f"Successfully cloned testbed: {target.resolve()}")


def update_test_plan(testbed_path, platform, args):
    # Modify the test plan to use the requested test arguments.
    test_plan_path = testbed_path / f"{platform}Testbed.xctestplan"
    with test_plan_path.open("r", encoding="utf-8") as f:
        test_plan = json.load(f)

    test_plan["defaultOptions"]["commandLineArgumentEntries"] = [
        {"argument": arg} for arg in args
    ]

    with test_plan_path.open("w", encoding="utf-8") as f:
        json.dump(test_plan, f, indent=2)


def run_testbed(
    platform: str,
    simulator: str | None,
    args: list[str],
    verbose: bool = False,
):
    location = Path(__file__).parent
    print("Updating test plan...", end="")
    update_test_plan(location, platform, args)
    print(" done.")

    if simulator is None:
        simulator = select_simulator_device(platform)
    print(f"Running test on {simulator}")

    xcode_test(
        location,
        platform=platform,
        simulator=simulator,
        verbose=verbose,
    )


def main():
    # Look for directories like `iOSTestbed` as an indicator of the platforms
    # that the testbed folder supports. The original source testbed can support
    # many platforms, but when cloned, only one platform is preserved.
    available_platforms = [
        platform
        for platform in ["iOS"]
        if (Path(__file__).parent / f"{platform}Testbed").is_dir()
    ]

    parser = argparse.ArgumentParser(
        description=(
            "Manages the process of testing an Apple Python project through Xcode."
        ),
    )

    subcommands = parser.add_subparsers(dest="subcommand")
    clone = subcommands.add_parser(
        "clone",
        description=(
            "Clone the testbed project, copying in a Python framework and "
            "any specified application code."
        ),
        help="Clone a testbed project to a new location.",
    )
    clone.add_argument(
        "--framework",
        help=(
            "The location of the XCFramework (or simulator-only slice of an "
            "XCFramework) to use when running the testbed"
        ),
    )
    clone.add_argument(
        "--platform",
        dest="platform",
        choices=available_platforms,
        default=available_platforms[0],
        help=f"The platform to target (default: {available_platforms[0]})",
    )
    clone.add_argument(
        "--app",
        dest="apps",
        action="append",
        default=[],
        help="The location of any code to include in the testbed project",
    )
    clone.add_argument(
        "location",
        help="The path where the testbed will be cloned.",
    )

    run = subcommands.add_parser(
        "run",
        usage="%(prog)s [-h] [--simulator SIMULATOR] -- <test arg> [<test arg> ...]",
        description=(
            "Run a testbed project. The arguments provided after `--` will be "
            "passed to the running iOS process as if they were arguments to "
            "`python -m`."
        ),
        help="Run a testbed project",
    )
    run.add_argument(
        "--platform",
        dest="platform",
        choices=available_platforms,
        default=available_platforms[0],
        help=f"The platform to target (default: {available_platforms[0]})",
    )
    run.add_argument(
        "--simulator",
        help=(
            "The name of the simulator to use (eg: 'iPhone 16e'). Defaults to "
            "the most recently released 'entry level' iPhone device. Device "
            "architecture and OS version can also be specified; e.g., "
            "`--simulator 'iPhone 16 Pro,arch=arm64,OS=26.0'` would run on "
            "an ARM64 iPhone 16 Pro simulator running iOS 26.0."
        ),
    )
    run.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Enable verbose output",
    )

    try:
        pos = sys.argv.index("--")
        testbed_args = sys.argv[1:pos]
        test_args = sys.argv[pos + 1 :]
    except ValueError:
        testbed_args = sys.argv[1:]
        test_args = []

    context = parser.parse_args(testbed_args)

    if context.subcommand == "clone":
        clone_testbed(
            source=Path(__file__).parent.resolve(),
            target=Path(context.location).resolve(),
            framework=Path(context.framework).resolve()
            if context.framework
            else None,
            platform=context.platform,
            apps=[Path(app) for app in context.apps],
        )
    elif context.subcommand == "run":
        if test_args:
            if not (
                Path(__file__).parent
                / "Python.xcframework"
                / TEST_SLICES[context.platform]
                / "bin"
            ).is_dir():
                print(
                    f"Testbed does not contain a compiled Python framework. Use "
                    f"`python {sys.argv[0]} clone ...` to create a runnable "
                    f"clone of this testbed."
                )
                sys.exit(20)

            run_testbed(
                platform=context.platform,
                simulator=context.simulator,
                verbose=context.verbose,
                args=test_args,
            )
        else:
            print(
                f"Must specify test arguments (e.g., {sys.argv[0]} run -- test)"
            )
            print()
            parser.print_help(sys.stderr)
            sys.exit(21)
    else:
        parser.print_help(sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
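A usage sketch for the CLI above (illustrative only, not part of the committed files): it assumes an iOS XCframework has already been built, and the paths cross-build/apple/iOS/Python.xcframework and testbed-clone, plus the choice of test_sys, are placeholders.

# Hypothetical host-side driver for the testbed CLI shown above.
import subprocess
import sys

TESTBED = "Apple/testbed"  # directory containing the __main__.py above
FRAMEWORK = "cross-build/apple/iOS/Python.xcframework"  # assumed build output
CLONE = "testbed-clone"  # assumed clone location

# Clone the testbed, linking in the previously built XCframework.
subprocess.run(
    [sys.executable, TESTBED, "clone", "--framework", FRAMEWORK, CLONE],
    check=True,
)

# Run a single test module; everything after "--" reaches the iOS process
# as if it were arguments to `python -m`.
subprocess.run(
    [sys.executable, CLONE, "run", "--", "test", "-W", "test_sys"],
    check=True,
)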
@@ -11,12 +11,13 @@
        607A66222B0EFA390010BFC8 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607A66212B0EFA390010BFC8 /* Assets.xcassets */; };
        607A66252B0EFA390010BFC8 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 607A66232B0EFA390010BFC8 /* LaunchScreen.storyboard */; };
        607A66282B0EFA390010BFC8 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 607A66272B0EFA390010BFC8 /* main.m */; };
        607A66322B0EFA3A0010BFC8 /* iOSTestbedTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 607A66312B0EFA3A0010BFC8 /* iOSTestbedTests.m */; };
        607A66322B0EFA3A0010BFC8 /* TestbedTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 607A66312B0EFA3A0010BFC8 /* TestbedTests.m */; };
        607A664C2B0EFC080010BFC8 /* Python.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; };
        607A664D2B0EFC080010BFC8 /* Python.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
        607A66502B0EFFE00010BFC8 /* Python.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; };
        607A66512B0EFFE00010BFC8 /* Python.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
        607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */ = {isa = PBXBuildFile; fileRef = 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */; };
        608619542CB77BA900F46182 /* app_packages in Resources */ = {isa = PBXBuildFile; fileRef = 608619532CB77BA900F46182 /* app_packages */; };
        608619562CB7819B00F46182 /* app in Resources */ = {isa = PBXBuildFile; fileRef = 608619552CB7819B00F46182 /* app */; };
/* End PBXBuildFile section */

/* Begin PBXContainerItemProxy section */

@@ -62,10 +63,12 @@
        607A66242B0EFA390010BFC8 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
        607A66272B0EFA390010BFC8 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
        607A662D2B0EFA3A0010BFC8 /* iOSTestbedTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = iOSTestbedTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
        607A66312B0EFA3A0010BFC8 /* iOSTestbedTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = iOSTestbedTests.m; sourceTree = "<group>"; };
        607A66312B0EFA3A0010BFC8 /* TestbedTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TestbedTests.m; sourceTree = "<group>"; };
        607A664A2B0EFB310010BFC8 /* Python.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; path = Python.xcframework; sourceTree = "<group>"; };
        607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "dylib-Info-template.plist"; sourceTree = "<group>"; };
        607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "iOSTestbed-Info.plist"; sourceTree = "<group>"; };
        608619532CB77BA900F46182 /* app_packages */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app_packages; sourceTree = "<group>"; };
        608619552CB7819B00F46182 /* app */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app; sourceTree = "<group>"; };
        60FE0EFB2E56BB6D00524F87 /* iOSTestbed.xctestplan */ = {isa = PBXFileReference; lastKnownFileType = text; path = iOSTestbed.xctestplan; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */

@@ -91,9 +94,10 @@
        607A66092B0EFA380010BFC8 = {
            isa = PBXGroup;
            children = (
                60FE0EFB2E56BB6D00524F87 /* iOSTestbed.xctestplan */,
                607A664A2B0EFB310010BFC8 /* Python.xcframework */,
                607A66142B0EFA380010BFC8 /* iOSTestbed */,
                607A66302B0EFA3A0010BFC8 /* iOSTestbedTests */,
                607A66302B0EFA3A0010BFC8 /* TestbedTests */,
                607A66132B0EFA380010BFC8 /* Products */,
                607A664F2B0EFFE00010BFC8 /* Frameworks */,
            );

@@ -111,8 +115,9 @@
        607A66142B0EFA380010BFC8 /* iOSTestbed */ = {
            isa = PBXGroup;
            children = (
                608619552CB7819B00F46182 /* app */,
                608619532CB77BA900F46182 /* app_packages */,
                607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */,
                607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */,
                607A66152B0EFA380010BFC8 /* AppDelegate.h */,
                607A66162B0EFA380010BFC8 /* AppDelegate.m */,
                607A66212B0EFA390010BFC8 /* Assets.xcassets */,

@@ -122,12 +127,12 @@
            path = iOSTestbed;
            sourceTree = "<group>";
        };
        607A66302B0EFA3A0010BFC8 /* iOSTestbedTests */ = {
        607A66302B0EFA3A0010BFC8 /* TestbedTests */ = {
            isa = PBXGroup;
            children = (
                607A66312B0EFA3A0010BFC8 /* iOSTestbedTests.m */,
                607A66312B0EFA3A0010BFC8 /* TestbedTests.m */,
            );
            path = iOSTestbedTests;
            path = TestbedTests;
            sourceTree = "<group>";
        };
        607A664F2B0EFFE00010BFC8 /* Frameworks */ = {

@@ -147,8 +152,7 @@
                607A660E2B0EFA380010BFC8 /* Sources */,
                607A660F2B0EFA380010BFC8 /* Frameworks */,
                607A66102B0EFA380010BFC8 /* Resources */,
                607A66552B0F061D0010BFC8 /* Install Target Specific Python Standard Library */,
                607A66562B0F06200010BFC8 /* Prepare Python Binary Modules */,
                607A66552B0F061D0010BFC8 /* Process Python libraries */,
                607A664E2B0EFC080010BFC8 /* Embed Frameworks */,
            );
            buildRules = (

@@ -222,8 +226,9 @@
            buildActionMask = 2147483647;
            files = (
                607A66252B0EFA390010BFC8 /* LaunchScreen.storyboard in Resources */,
                607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */,
                608619562CB7819B00F46182 /* app in Resources */,
                607A66222B0EFA390010BFC8 /* Assets.xcassets in Resources */,
                608619542CB77BA900F46182 /* app_packages in Resources */,
            );
            runOnlyForDeploymentPostprocessing = 0;
        };

@@ -237,7 +242,7 @@
/* End PBXResourcesBuildPhase section */

/* Begin PBXShellScriptBuildPhase section */
        607A66552B0F061D0010BFC8 /* Install Target Specific Python Standard Library */ = {
        607A66552B0F061D0010BFC8 /* Process Python libraries */ = {
            isa = PBXShellScriptBuildPhase;
            alwaysOutOfDate = 1;
            buildActionMask = 2147483647;

@@ -247,33 +252,15 @@
            );
            inputPaths = (
            );
            name = "Install Target Specific Python Standard Library";
            name = "Process Python libraries";
            outputFileListPaths = (
            );
            outputPaths = (
            );
            runOnlyForDeploymentPostprocessing = 0;
            shellPath = /bin/sh;
            shellScript = "set -e\n\nmkdir -p \"$CODESIGNING_FOLDER_PATH/python/lib\"\nif [ \"$EFFECTIVE_PLATFORM_NAME\" = \"-iphonesimulator\" ]; then\n echo \"Installing Python modules for iOS Simulator\"\n rsync -au --delete \"$PROJECT_DIR/Python.xcframework/ios-arm64_x86_64-simulator/lib/\" \"$CODESIGNING_FOLDER_PATH/python/lib/\" \nelse\n echo \"Installing Python modules for iOS Device\"\n rsync -au --delete \"$PROJECT_DIR/Python.xcframework/ios-arm64/lib/\" \"$CODESIGNING_FOLDER_PATH/python/lib/\" \nfi\n";
        };
        607A66562B0F06200010BFC8 /* Prepare Python Binary Modules */ = {
            isa = PBXShellScriptBuildPhase;
            alwaysOutOfDate = 1;
            buildActionMask = 2147483647;
            files = (
            );
            inputFileListPaths = (
            );
            inputPaths = (
            );
            name = "Prepare Python Binary Modules";
            outputFileListPaths = (
            );
            outputPaths = (
            );
            runOnlyForDeploymentPostprocessing = 0;
            shellPath = /bin/sh;
            shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! -d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n";
            shellScript = "set -e\nsource $PROJECT_DIR/Python.xcframework/build/utils.sh\ninstall_python Python.xcframework app app_packages\n";
            showEnvVarsInLog = 0;
        };
/* End PBXShellScriptBuildPhase section */

@@ -291,7 +278,7 @@
            isa = PBXSourcesBuildPhase;
            buildActionMask = 2147483647;
            files = (
                607A66322B0EFA3A0010BFC8 /* iOSTestbedTests.m in Sources */,
                607A66322B0EFA3A0010BFC8 /* TestbedTests.m in Sources */,
            );
            runOnlyForDeploymentPostprocessing = 0;
        };

@@ -369,7 +356,7 @@
                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
                GCC_WARN_UNUSED_FUNCTION = YES;
                GCC_WARN_UNUSED_VARIABLE = YES;
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
                MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
                MTL_FAST_MATH = YES;

@@ -424,7 +411,7 @@
                GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
                GCC_WARN_UNUSED_FUNCTION = YES;
                GCC_WARN_UNUSED_VARIABLE = YES;
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
                MTL_ENABLE_DEBUG_INFO = NO;
                MTL_FAST_MATH = YES;

@@ -450,7 +437,7 @@
                INFOPLIST_KEY_UIMainStoryboardFile = Main;
                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                LD_RUNPATH_SEARCH_PATHS = (
                    "$(inherited)",
                    "@executable_path/Frameworks",

@@ -481,7 +468,7 @@
                INFOPLIST_KEY_UIMainStoryboardFile = Main;
                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
                INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                LD_RUNPATH_SEARCH_PATHS = (
                    "$(inherited)",
                    "@executable_path/Frameworks",

@@ -504,7 +491,7 @@
                DEVELOPMENT_TEAM = 3HEZE76D99;
                GENERATE_INFOPLIST_FILE = YES;
                HEADER_SEARCH_PATHS = "\"$(BUILT_PRODUCTS_DIR)/Python.framework/Headers\"";
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                MARKETING_VERSION = 1.0;
                PRODUCT_BUNDLE_IDENTIFIER = org.python.iOSTestbedTests;
                PRODUCT_NAME = "$(TARGET_NAME)";

@@ -524,7 +511,7 @@
                DEVELOPMENT_TEAM = 3HEZE76D99;
                GENERATE_INFOPLIST_FILE = YES;
                HEADER_SEARCH_PATHS = "\"$(BUILT_PRODUCTS_DIR)/Python.framework/Headers\"";
                IPHONEOS_DEPLOYMENT_TARGET = 12.0;
                IPHONEOS_DEPLOYMENT_TARGET = 13.0;
                MARKETING_VERSION = 1.0;
                PRODUCT_BUNDLE_IDENTIFIER = org.python.iOSTestbedTests;
                PRODUCT_NAME = "$(TARGET_NAME)";

@@ -0,0 +1,97 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
   LastUpgradeVersion = "1640"
   version = "1.7">
   <BuildAction
      parallelizeBuildables = "YES"
      buildImplicitDependencies = "YES"
      buildArchitectures = "Automatic">
      <BuildActionEntries>
         <BuildActionEntry
            buildForTesting = "YES"
            buildForRunning = "YES"
            buildForProfiling = "YES"
            buildForArchiving = "YES"
            buildForAnalyzing = "YES">
            <BuildableReference
               BuildableIdentifier = "primary"
               BlueprintIdentifier = "607A66112B0EFA380010BFC8"
               BuildableName = "iOSTestbed.app"
               BlueprintName = "iOSTestbed"
               ReferencedContainer = "container:iOSTestbed.xcodeproj">
            </BuildableReference>
         </BuildActionEntry>
      </BuildActionEntries>
   </BuildAction>
   <TestAction
      buildConfiguration = "Debug"
      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
      customLLDBInitFile = "$(SOURCE_ROOT)/Testbed.lldbinit"
      shouldUseLaunchSchemeArgsEnv = "YES">
      <TestPlans>
         <TestPlanReference
            reference = "container:iOSTestbed.xctestplan"
            default = "YES">
         </TestPlanReference>
      </TestPlans>
      <Testables>
         <TestableReference
            skipped = "NO"
            parallelizable = "NO">
            <BuildableReference
               BuildableIdentifier = "primary"
               BlueprintIdentifier = "607A662C2B0EFA3A0010BFC8"
               BuildableName = "iOSTestbedTests.xctest"
               BlueprintName = "iOSTestbedTests"
               ReferencedContainer = "container:iOSTestbed.xcodeproj">
            </BuildableReference>
         </TestableReference>
      </Testables>
   </TestAction>
   <LaunchAction
      buildConfiguration = "Debug"
      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
      launchStyle = "0"
      useCustomWorkingDirectory = "NO"
      ignoresPersistentStateOnLaunch = "NO"
      debugDocumentVersioning = "YES"
      debugServiceExtension = "internal"
      allowLocationSimulation = "YES">
      <BuildableProductRunnable
         runnableDebuggingMode = "0">
         <BuildableReference
            BuildableIdentifier = "primary"
            BlueprintIdentifier = "607A66112B0EFA380010BFC8"
            BuildableName = "iOSTestbed.app"
            BlueprintName = "iOSTestbed"
            ReferencedContainer = "container:iOSTestbed.xcodeproj">
         </BuildableReference>
      </BuildableProductRunnable>
   </LaunchAction>
   <ProfileAction
      buildConfiguration = "Release"
      shouldUseLaunchSchemeArgsEnv = "YES"
      savedToolIdentifier = ""
      useCustomWorkingDirectory = "NO"
      debugDocumentVersioning = "YES">
      <BuildableProductRunnable
         runnableDebuggingMode = "0">
         <BuildableReference
            BuildableIdentifier = "primary"
            BlueprintIdentifier = "607A66112B0EFA380010BFC8"
            BuildableName = "iOSTestbed.app"
            BlueprintName = "iOSTestbed"
            ReferencedContainer = "container:iOSTestbed.xcodeproj">
         </BuildableReference>
      </BuildableProductRunnable>
   </ProfileAction>
   <AnalyzeAction
      buildConfiguration = "Debug">
   </AnalyzeAction>
   <ArchiveAction
      buildConfiguration = "Release"
      revealArchiveInOrganizer = "YES">
   </ArchiveAction>
</Scheme>
46  Apple/testbed/iOSTestbed.xctestplan  Normal file
@@ -0,0 +1,46 @@
{
  "configurations" : [
    {
      "id" : "F5A95CE4-1ADE-4A6E-A0E1-CDBAE26DF0C5",
      "name" : "Test Scheme Action",
      "options" : {

      }
    }
  ],
  "defaultOptions" : {
    "commandLineArgumentEntries" : [
      {
        "argument" : "test"
      },
      {
        "argument" : "-uall"
      },
      {
        "argument" : "--single-process"
      },
      {
        "argument" : "--rerun"
      },
      {
        "argument" : "-W"
      }
    ],
    "targetForVariableExpansion" : {
      "containerPath" : "container:iOSTestbed.xcodeproj",
      "identifier" : "607A66112B0EFA380010BFC8",
      "name" : "iOSTestbed"
    }
  },
  "testTargets" : [
    {
      "parallelizable" : false,
      "target" : {
        "containerPath" : "container:iOSTestbed.xcodeproj",
        "identifier" : "607A662C2B0EFA3A0010BFC8",
        "name" : "iOSTestbedTests"
      }
    }
  ],
  "version" : 1
}
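For orientation (an editorial sketch, not part of the file above): the default arguments in the test plan are exactly what __main__.py replaces when explicit test arguments are passed after `--`, and per the script's help text they reach the embedded interpreter as if given to `python -m`. Roughly, the desktop equivalent would be:

# Rough desktop equivalent of the default test plan arguments (sketch only;
# on the simulator these are injected through the XCTest runner instead).
import subprocess
import sys

subprocess.run(
    [sys.executable, "-m", "test", "-uall", "--single-process", "--rerun", "-W"],
    check=True,
)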
7  Apple/testbed/iOSTestbed/app/README  Normal file
@@ -0,0 +1,7 @@
This folder can contain any Python application code.

During the build, any binary modules found in this folder will be processed into
Framework form.

When the test suite runs, this folder will be on the PYTHONPATH, and will be the
working directory for the test suite.
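As an illustration of the note above (hypothetical, not part of the commit): because this folder is on PYTHONPATH, a module dropped here can be exercised with the testbed's run command; the name smoke_check.py is invented for this sketch, and it would be launched with `python <testbed> run -- smoke_check`.

# app/smoke_check.py (hypothetical example module placed in this folder)
import platform
import sys


def main():
    # Report the interpreter and OS the testbed launched us under.
    print(f"Python {sys.version.split()[0]} on {platform.platform()}")


if __name__ == "__main__":
    main()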
7  Apple/testbed/iOSTestbed/app_packages/README  Normal file
@@ -0,0 +1,7 @@
This folder can be a target for installing any Python dependencies needed by the
test suite.

During the build, any binary modules found in this folder will be processed into
Framework form.

When the test suite runs, this folder will be on the PYTHONPATH.
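A hedged host-side sketch of populating this folder (not part of the commit): pip's --target option works for pure-Python dependencies; packages containing binary modules must instead be built for iOS before being placed here. The package name attrs is only an example.

# Hypothetical host-side step: vendor a pure-Python dependency into app_packages.
import subprocess
import sys

subprocess.run(
    [
        sys.executable, "-m", "pip", "install",
        "--target", "Apple/testbed/iOSTestbed/app_packages",
        "attrs",  # assumed example; any pure-Python package works the same way
    ],
    check=True,
)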
Some files were not shown because too many files have changed in this diff.