Mirror of https://github.com/python/cpython.git (synced 2025-11-01 06:01:29 +00:00)
gh-138122: Implement PEP 799 (#138142)
parent f733e428f8, commit 56eb6b64a0
23 changed files with 497 additions and 386 deletions
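For orientation, this commit maps the old profiler entry points onto the new profiling package. The sketch below is illustrative only (myscript.py is a placeholder) and is based on the imports, warning text, and CLI usage strings changed in this diff.

    # Old spellings: cProfile keeps working as a thin wrapper, and importing
    # profile now emits a DeprecationWarning (removal planned for Python 3.17).
    import cProfile
    cProfile.run("sum(range(1000))")

    # New spellings added by this commit:
    from profiling.tracing import Profile, run, runctx   # deterministic tracing profiler
    run("sum(range(1000))")

    # Command-line equivalents (illustrative):
    #   python -m profiling.tracing myscript.py       # tracing profiler
    #   python -m profiling.sampling -p 1234           # sampling profiler, attach to PID 1234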
Lib/cProfile.py (207 lines changed)
@@ -1,205 +1,14 @@
-"""Python interface for the 'lsprof' profiler.
-
-Compatible with the 'profile' module.
+"""Compatibility wrapper for cProfile module.
+
+This module maintains backward compatibility by importing from the new
+profiling.tracing module.
+"""
+
+from profiling.tracing import run, runctx, Profile
+
+__all__ = ["run", "runctx", "Profile"]
|
||||
|
||||
import _lsprof
|
||||
import importlib.machinery
|
||||
import importlib.util
|
||||
import io
|
||||
import profile as _pyprofile
|
||||
|
||||
# ____________________________________________________________
|
||||
# Simple interface
|
||||
|
||||
def run(statement, filename=None, sort=-1):
|
||||
return _pyprofile._Utils(Profile).run(statement, filename, sort)
|
||||
|
||||
def runctx(statement, globals, locals, filename=None, sort=-1):
|
||||
return _pyprofile._Utils(Profile).runctx(statement, globals, locals,
|
||||
filename, sort)
|
||||
|
||||
run.__doc__ = _pyprofile.run.__doc__
|
||||
runctx.__doc__ = _pyprofile.runctx.__doc__
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
class Profile(_lsprof.Profiler):
|
||||
"""Profile(timer=None, timeunit=None, subcalls=True, builtins=True)
|
||||
|
||||
Builds a profiler object using the specified timer function.
|
||||
The default timer is a fast built-in one based on real time.
|
||||
For custom timer functions returning integers, timeunit can
|
||||
be a float specifying a scale (i.e. how long each integer unit
|
||||
is, in seconds).
|
||||
"""
|
||||
|
||||
# Most of the functionality is in the base class.
|
||||
# This subclass only adds convenient and backward-compatible methods.
|
||||
|
||||
def print_stats(self, sort=-1):
|
||||
import pstats
|
||||
if not isinstance(sort, tuple):
|
||||
sort = (sort,)
|
||||
pstats.Stats(self).strip_dirs().sort_stats(*sort).print_stats()
|
||||
|
||||
def dump_stats(self, file):
|
||||
import marshal
|
||||
with open(file, 'wb') as f:
|
||||
self.create_stats()
|
||||
marshal.dump(self.stats, f)
|
||||
|
||||
def create_stats(self):
|
||||
self.disable()
|
||||
self.snapshot_stats()
|
||||
|
||||
def snapshot_stats(self):
|
||||
entries = self.getstats()
|
||||
self.stats = {}
|
||||
callersdicts = {}
|
||||
# call information
|
||||
for entry in entries:
|
||||
func = label(entry.code)
|
||||
nc = entry.callcount # ncalls column of pstats (before '/')
|
||||
cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
|
||||
tt = entry.inlinetime # tottime column of pstats
|
||||
ct = entry.totaltime # cumtime column of pstats
|
||||
callers = {}
|
||||
callersdicts[id(entry.code)] = callers
|
||||
self.stats[func] = cc, nc, tt, ct, callers
|
||||
# subcall information
|
||||
for entry in entries:
|
||||
if entry.calls:
|
||||
func = label(entry.code)
|
||||
for subentry in entry.calls:
|
||||
try:
|
||||
callers = callersdicts[id(subentry.code)]
|
||||
except KeyError:
|
||||
continue
|
||||
nc = subentry.callcount
|
||||
cc = nc - subentry.reccallcount
|
||||
tt = subentry.inlinetime
|
||||
ct = subentry.totaltime
|
||||
if func in callers:
|
||||
prev = callers[func]
|
||||
nc += prev[0]
|
||||
cc += prev[1]
|
||||
tt += prev[2]
|
||||
ct += prev[3]
|
||||
callers[func] = nc, cc, tt, ct
|
||||
|
||||
# The following two methods can be called by clients to use
|
||||
# a profiler to profile a statement, given as a string.
|
||||
|
||||
def run(self, cmd):
|
||||
import __main__
|
||||
dict = __main__.__dict__
|
||||
return self.runctx(cmd, dict, dict)
|
||||
|
||||
def runctx(self, cmd, globals, locals):
|
||||
self.enable()
|
||||
try:
|
||||
exec(cmd, globals, locals)
|
||||
finally:
|
||||
self.disable()
|
||||
return self
|
||||
|
||||
# This method is more useful to profile a single function call.
|
||||
def runcall(self, func, /, *args, **kw):
|
||||
self.enable()
|
||||
try:
|
||||
return func(*args, **kw)
|
||||
finally:
|
||||
self.disable()
|
||||
|
||||
def __enter__(self):
|
||||
self.enable()
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
self.disable()
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
def label(code):
|
||||
if isinstance(code, str):
|
||||
return ('~', 0, code) # built-in functions ('~' sorts at the end)
|
||||
else:
|
||||
return (code.co_filename, code.co_firstlineno, code.co_name)
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
def main():
|
||||
import os
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
import runpy
|
||||
import pstats
|
||||
from optparse import OptionParser
|
||||
usage = "cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-o', '--outfile', dest="outfile",
|
||||
help="Save stats to <outfile>", default=None)
|
||||
parser.add_option('-s', '--sort', dest="sort",
|
||||
help="Sort order when printing to stdout, based on pstats.Stats class",
|
||||
default=2,
|
||||
choices=sorted(pstats.Stats.sort_arg_dict_default))
|
||||
parser.add_option('-m', dest="module", action="store_true",
|
||||
help="Profile a library module", default=False)
|
||||
|
||||
if not sys.argv[1:]:
|
||||
parser.print_usage()
|
||||
sys.exit(2)
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
sys.argv[:] = args
|
||||
|
||||
# The script that we're profiling may chdir, so capture the absolute path
|
||||
# to the output file at startup.
|
||||
if options.outfile is not None:
|
||||
options.outfile = os.path.abspath(options.outfile)
|
||||
|
||||
if len(args) > 0:
|
||||
if options.module:
|
||||
code = "run_module(modname, run_name='__main__')"
|
||||
globs = {
|
||||
'run_module': runpy.run_module,
|
||||
'modname': args[0]
|
||||
}
|
||||
else:
|
||||
progname = args[0]
|
||||
sys.path.insert(0, os.path.dirname(progname))
|
||||
with io.open_code(progname) as fp:
|
||||
code = compile(fp.read(), progname, 'exec')
|
||||
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
|
||||
origin=progname)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
# Set __main__ so that importing __main__ in the profiled code will
|
||||
# return the same namespace that the code is executing under.
|
||||
sys.modules['__main__'] = module
|
||||
# Ensure that we're using the same __dict__ instance as the module
|
||||
# for the global variables so that updates to globals are reflected
|
||||
# in the module's namespace.
|
||||
globs = module.__dict__
|
||||
globs.update({
|
||||
'__spec__': spec,
|
||||
'__file__': spec.origin,
|
||||
'__name__': spec.name,
|
||||
'__package__': None,
|
||||
'__cached__': None,
|
||||
})
|
||||
|
||||
try:
|
||||
runctx(code, globs, None, options.outfile, options.sort)
|
||||
except BrokenPipeError as exc:
|
||||
# Prevent "Exception ignored" during interpreter shutdown.
|
||||
sys.stdout = None
|
||||
sys.exit(exc.errno)
|
||||
else:
|
||||
parser.print_usage()
|
||||
return parser
|
||||
|
||||
# When invoked as main program, invoke the profiler on a script
|
||||
if __name__ == '__main__':
|
||||
from profiling.tracing.__main__ import main
|
||||
main()
@@ -28,9 +28,18 @@
 import sys
 import time
 import marshal
+import warnings

 __all__ = ["run", "runctx", "Profile"]

+# Emit deprecation warning as per PEP 799
+warnings.warn(
+    "The profile module is deprecated and will be removed in Python 3.17. "
+    "Use profiling.tracing (or cProfile) for tracing profilers instead.",
+    DeprecationWarning,
+    stacklevel=2
+)
+
 # Sample timer for use with
 #i_count = 0
 #def integer_timer():
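Code that still imports the legacy profile module can observe or silence the warning added above. A minimal sketch, mirroring the suppression pattern used by the test-suite change later in this diff:

    import warnings

    # Observe the new DeprecationWarning emitted at import time (PEP 799).
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always", DeprecationWarning)
        import profile  # noqa: F401
    for w in caught:
        print(w.category.__name__, "-", w.message)

    # Or silence it instead, as the updated tests do:
    # with warnings.catch_warnings():
    #     warnings.simplefilter("ignore", DeprecationWarning)
    #     import profile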
@ -550,3 +559,66 @@ def f(m, f1=f1):
|
|||
return mean
|
||||
|
||||
#****************************************************************************
|
||||
|
||||
def main():
|
||||
import os
|
||||
from optparse import OptionParser
|
||||
|
||||
usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-o', '--outfile', dest="outfile",
|
||||
help="Save stats to <outfile>", default=None)
|
||||
parser.add_option('-m', dest="module", action="store_true",
|
||||
help="Profile a library module.", default=False)
|
||||
parser.add_option('-s', '--sort', dest="sort",
|
||||
help="Sort order when printing to stdout, based on pstats.Stats class",
|
||||
default=-1)
|
||||
|
||||
if not sys.argv[1:]:
|
||||
parser.print_usage()
|
||||
sys.exit(2)
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
sys.argv[:] = args
|
||||
|
||||
# The script that we're profiling may chdir, so capture the absolute path
|
||||
# to the output file at startup.
|
||||
if options.outfile is not None:
|
||||
options.outfile = os.path.abspath(options.outfile)
|
||||
|
||||
if len(args) > 0:
|
||||
if options.module:
|
||||
import runpy
|
||||
code = "run_module(modname, run_name='__main__')"
|
||||
globs = {
|
||||
'run_module': runpy.run_module,
|
||||
'modname': args[0]
|
||||
}
|
||||
else:
|
||||
progname = args[0]
|
||||
sys.path.insert(0, os.path.dirname(progname))
|
||||
with io.open_code(progname) as fp:
|
||||
code = compile(fp.read(), progname, 'exec')
|
||||
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
|
||||
origin=progname)
|
||||
globs = {
|
||||
'__spec__': spec,
|
||||
'__file__': spec.origin,
|
||||
'__name__': spec.name,
|
||||
'__package__': None,
|
||||
'__cached__': None,
|
||||
}
|
||||
try:
|
||||
runctx(code, globs, None, options.outfile, options.sort)
|
||||
except BrokenPipeError as exc:
|
||||
# Prevent "Exception ignored" during interpreter shutdown.
|
||||
sys.stdout = None
|
||||
sys.exit(exc.errno)
|
||||
else:
|
||||
parser.print_usage()
|
||||
return parser
|
||||
|
||||
# When invoked as main program, invoke the profiler on a script
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@@ -1,6 +0,0 @@
-from .profile import run
-from .profile import runctx
-from .profile import Profile
-from .profile import _Utils
-
-__all__ = ['run', 'runctx', 'Profile']
@ -1,69 +0,0 @@
|
|||
import io
|
||||
import importlib.machinery
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
from .profile import runctx
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-o', '--outfile', dest="outfile",
|
||||
help="Save stats to <outfile>", default=None)
|
||||
parser.add_option('-m', dest="module", action="store_true",
|
||||
help="Profile a library module.", default=False)
|
||||
parser.add_option('-s', '--sort', dest="sort",
|
||||
help="Sort order when printing to stdout, based on pstats.Stats class",
|
||||
default=-1)
|
||||
|
||||
if not sys.argv[1:]:
|
||||
parser.print_usage()
|
||||
sys.exit(2)
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
sys.argv[:] = args
|
||||
|
||||
# The script that we're profiling may chdir, so capture the absolute path
|
||||
# to the output file at startup.
|
||||
if options.outfile is not None:
|
||||
options.outfile = os.path.abspath(options.outfile)
|
||||
|
||||
if len(args) > 0:
|
||||
if options.module:
|
||||
import runpy
|
||||
code = "run_module(modname, run_name='__main__')"
|
||||
globs = {
|
||||
'run_module': runpy.run_module,
|
||||
'modname': args[0]
|
||||
}
|
||||
else:
|
||||
progname = args[0]
|
||||
sys.path.insert(0, os.path.dirname(progname))
|
||||
with io.open_code(progname) as fp:
|
||||
code = compile(fp.read(), progname, 'exec')
|
||||
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
|
||||
origin=progname)
|
||||
globs = {
|
||||
'__spec__': spec,
|
||||
'__file__': spec.origin,
|
||||
'__name__': spec.name,
|
||||
'__package__': None,
|
||||
'__cached__': None,
|
||||
}
|
||||
try:
|
||||
runctx(code, globs, None, options.outfile, options.sort)
|
||||
except BrokenPipeError as exc:
|
||||
# Prevent "Exception ignored" during interpreter shutdown.
|
||||
sys.stdout = None
|
||||
sys.exit(exc.errno)
|
||||
else:
|
||||
parser.print_usage()
|
||||
return parser
|
||||
|
||||
# When invoked as main program, invoke the profiler on a script
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
Lib/profiling/__init__.py (new file, 13 lines)

@@ -0,0 +1,13 @@
+"""Python profiling tools.
+
+This package provides two types of profilers:
+
+- profiling.tracing: Deterministic tracing profiler that instruments every
+  function call and return. Higher overhead but provides exact call counts
+  and timing.
+
+- profiling.sampling: Statistical sampling profiler that periodically samples
+  the call stack. Low overhead and suitable for production use.
+"""
+
+__all__ = ("tracing", "sampling")
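The two sub-packages described above are driven differently. A hedged sketch based only on the APIs added in this commit (work() is a placeholder):

    # Deterministic tracing: exact call counts and timings, higher overhead.
    from profiling.tracing import Profile

    def work():
        return sum(i * i for i in range(10_000))

    with Profile() as prof:          # Profile supports the context-manager protocol
        work()
    prof.print_stats(sort="cumulative")

    # Statistical sampling: attaches to a running process from outside, so it
    # is normally driven from the command line, e.g.:
    #   python -m profiling.sampling -i 1000 -d 60 -p <pid>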
Lib/profiling/sampling/__init__.py (new file, 11 lines)

@@ -0,0 +1,11 @@
+"""Statistical sampling profiler for Python.
+
+This module provides low-overhead profiling by periodically sampling the
+call stack rather than tracing every function call.
+"""
+
+from .collector import Collector
+from .pstats_collector import PstatsCollector
+from .stack_collector import CollapsedStackCollector
+
+__all__ = ("Collector", "PstatsCollector", "CollapsedStackCollector")
Lib/profiling/sampling/__main__.py (new file, 6 lines)

@@ -0,0 +1,6 @@
+"""Run the sampling profiler from the command line."""
+
+from .sample import main
+
+if __name__ == '__main__':
+    main()
@@ -25,40 +25,40 @@
 Examples:
 # Profile process 1234 for 10 seconds with default settings
-python -m profile.sample -p 1234
+python -m profiling.sampling -p 1234

 # Profile a script by running it in a subprocess
-python -m profile.sample myscript.py arg1 arg2
+python -m profiling.sampling myscript.py arg1 arg2

 # Profile a module by running it as python -m module in a subprocess
-python -m profile.sample -m mymodule arg1 arg2
+python -m profiling.sampling -m mymodule arg1 arg2

 # Profile with custom interval and duration, save to file
-python -m profile.sample -i 50 -d 30 -o profile.stats -p 1234
+python -m profiling.sampling -i 50 -d 30 -o profile.stats -p 1234

 # Generate collapsed stacks for flamegraph
-python -m profile.sample --collapsed -p 1234
+python -m profiling.sampling --collapsed -p 1234

 # Profile all threads, sort by total time
-python -m profile.sample -a --sort-tottime -p 1234
+python -m profiling.sampling -a --sort-tottime -p 1234

 # Profile for 1 minute with 1ms sampling interval
-python -m profile.sample -i 1000 -d 60 -p 1234
+python -m profiling.sampling -i 1000 -d 60 -p 1234

 # Show only top 20 functions sorted by direct samples
-python -m profile.sample --sort-nsamples -l 20 -p 1234
+python -m profiling.sampling --sort-nsamples -l 20 -p 1234

 # Profile all threads and save collapsed stacks
-python -m profile.sample -a --collapsed -o stacks.txt -p 1234
+python -m profiling.sampling -a --collapsed -o stacks.txt -p 1234

 # Profile with real-time sampling statistics
-python -m profile.sample --realtime-stats -p 1234
+python -m profiling.sampling --realtime-stats -p 1234

 # Sort by sample percentage to find most sampled functions
-python -m profile.sample --sort-sample-pct -p 1234
+python -m profiling.sampling --sort-sample-pct -p 1234

 # Sort by cumulative samples to find functions most on call stack
-python -m profile.sample --sort-nsamples-cumul -p 1234"""
+python -m profiling.sampling --sort-nsamples-cumul -p 1234"""


 # Constants for socket synchronization
@@ -84,7 +84,7 @@ def _run_with_sync(original_cmd):

     # Build command using the sync coordinator
     target_args = original_cmd[1:]  # Remove python executable
-    cmd = (sys.executable, "-m", "profile._sync_coordinator", str(sync_port), cwd) + tuple(target_args)
+    cmd = (sys.executable, "-m", "profiling.sampling._sync_coordinator", str(sync_port), cwd) + tuple(target_args)

     # Start the process with coordinator
     process = subprocess.Popen(cmd)
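The -o profile.stats example in the docstring above writes a pstats-compatible dump (the default --pstats format exercised by the tests in this commit). A hedged sketch of inspecting such a file afterwards, the filename matching that example:

    import pstats

    # Written by: python -m profiling.sampling -i 50 -d 30 -o profile.stats -p 1234
    stats = pstats.Stats("profile.stats")
    stats.sort_stats("cumulative").print_stats(20)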
219
Lib/profiling/tracing/__init__.py
Normal file
219
Lib/profiling/tracing/__init__.py
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
"""Tracing profiler for Python.
|
||||
|
||||
This module provides deterministic profiling of Python programs by tracing
|
||||
every function call and return.
|
||||
"""
|
||||
|
||||
__all__ = ("run", "runctx", "Profile")
|
||||
|
||||
import _lsprof
|
||||
import importlib.machinery
|
||||
import importlib.util
|
||||
import io
|
||||
from profiling.tracing._utils import _Utils
|
||||
|
||||
# ____________________________________________________________
|
||||
# Simple interface
|
||||
|
||||
def run(statement, filename=None, sort=-1):
|
||||
"""Run statement under profiler optionally saving results in filename
|
||||
|
||||
This function takes a single argument that can be passed to the
|
||||
"exec" statement, and an optional file name. In all cases this
|
||||
routine attempts to "exec" its first argument and gather profiling
|
||||
statistics from the execution. If no file name is present, then this
|
||||
function automatically prints a simple profiling report, sorted by the
|
||||
standard name string (file/line/function-name) that is presented in
|
||||
each line.
|
||||
"""
|
||||
return _Utils(Profile).run(statement, filename, sort)
|
||||
|
||||
def runctx(statement, globals, locals, filename=None, sort=-1):
|
||||
"""Run statement under profiler, supplying your own globals and locals,
|
||||
optionally saving results in filename.
|
||||
|
||||
statement and filename have the same semantics as profile.run
|
||||
"""
|
||||
return _Utils(Profile).runctx(statement, globals, locals,
|
||||
filename, sort)
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
class Profile(_lsprof.Profiler):
|
||||
"""Profile(timer=None, timeunit=None, subcalls=True, builtins=True)
|
||||
|
||||
Builds a profiler object using the specified timer function.
|
||||
The default timer is a fast built-in one based on real time.
|
||||
For custom timer functions returning integers, timeunit can
|
||||
be a float specifying a scale (i.e. how long each integer unit
|
||||
is, in seconds).
|
||||
"""
|
||||
|
||||
# Most of the functionality is in the base class.
|
||||
# This subclass only adds convenient and backward-compatible methods.
|
||||
|
||||
def print_stats(self, sort=-1):
|
||||
import pstats
|
||||
if not isinstance(sort, tuple):
|
||||
sort = (sort,)
|
||||
pstats.Stats(self).strip_dirs().sort_stats(*sort).print_stats()
|
||||
|
||||
def dump_stats(self, file):
|
||||
import marshal
|
||||
with open(file, 'wb') as f:
|
||||
self.create_stats()
|
||||
marshal.dump(self.stats, f)
|
||||
|
||||
def create_stats(self):
|
||||
self.disable()
|
||||
self.snapshot_stats()
|
||||
|
||||
def snapshot_stats(self):
|
||||
entries = self.getstats()
|
||||
self.stats = {}
|
||||
callersdicts = {}
|
||||
# call information
|
||||
for entry in entries:
|
||||
func = label(entry.code)
|
||||
nc = entry.callcount # ncalls column of pstats (before '/')
|
||||
cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
|
||||
tt = entry.inlinetime # tottime column of pstats
|
||||
ct = entry.totaltime # cumtime column of pstats
|
||||
callers = {}
|
||||
callersdicts[id(entry.code)] = callers
|
||||
self.stats[func] = cc, nc, tt, ct, callers
|
||||
# subcall information
|
||||
for entry in entries:
|
||||
if entry.calls:
|
||||
func = label(entry.code)
|
||||
for subentry in entry.calls:
|
||||
try:
|
||||
callers = callersdicts[id(subentry.code)]
|
||||
except KeyError:
|
||||
continue
|
||||
nc = subentry.callcount
|
||||
cc = nc - subentry.reccallcount
|
||||
tt = subentry.inlinetime
|
||||
ct = subentry.totaltime
|
||||
if func in callers:
|
||||
prev = callers[func]
|
||||
nc += prev[0]
|
||||
cc += prev[1]
|
||||
tt += prev[2]
|
||||
ct += prev[3]
|
||||
callers[func] = nc, cc, tt, ct
|
||||
|
||||
# The following two methods can be called by clients to use
|
||||
# a profiler to profile a statement, given as a string.
|
||||
|
||||
def run(self, cmd):
|
||||
import __main__
|
||||
dict = __main__.__dict__
|
||||
return self.runctx(cmd, dict, dict)
|
||||
|
||||
def runctx(self, cmd, globals, locals):
|
||||
self.enable()
|
||||
try:
|
||||
exec(cmd, globals, locals)
|
||||
finally:
|
||||
self.disable()
|
||||
return self
|
||||
|
||||
# This method is more useful to profile a single function call.
|
||||
def runcall(self, func, /, *args, **kw):
|
||||
self.enable()
|
||||
try:
|
||||
return func(*args, **kw)
|
||||
finally:
|
||||
self.disable()
|
||||
|
||||
def __enter__(self):
|
||||
self.enable()
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_info):
|
||||
self.disable()
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
def label(code):
|
||||
if isinstance(code, str):
|
||||
return ('~', 0, code) # built-in functions ('~' sorts at the end)
|
||||
else:
|
||||
return (code.co_filename, code.co_firstlineno, code.co_name)
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
def main():
|
||||
import os
|
||||
import sys
|
||||
import runpy
|
||||
import pstats
|
||||
from optparse import OptionParser
|
||||
usage = "cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-o', '--outfile', dest="outfile",
|
||||
help="Save stats to <outfile>", default=None)
|
||||
parser.add_option('-s', '--sort', dest="sort",
|
||||
help="Sort order when printing to stdout, based on pstats.Stats class",
|
||||
default=2,
|
||||
choices=sorted(pstats.Stats.sort_arg_dict_default))
|
||||
parser.add_option('-m', dest="module", action="store_true",
|
||||
help="Profile a library module", default=False)
|
||||
|
||||
if not sys.argv[1:]:
|
||||
parser.print_usage()
|
||||
sys.exit(2)
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
sys.argv[:] = args
|
||||
|
||||
# The script that we're profiling may chdir, so capture the absolute path
|
||||
# to the output file at startup.
|
||||
if options.outfile is not None:
|
||||
options.outfile = os.path.abspath(options.outfile)
|
||||
|
||||
if len(args) > 0:
|
||||
if options.module:
|
||||
code = "run_module(modname, run_name='__main__')"
|
||||
globs = {
|
||||
'run_module': runpy.run_module,
|
||||
'modname': args[0]
|
||||
}
|
||||
else:
|
||||
progname = args[0]
|
||||
sys.path.insert(0, os.path.dirname(progname))
|
||||
with io.open_code(progname) as fp:
|
||||
code = compile(fp.read(), progname, 'exec')
|
||||
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
|
||||
origin=progname)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
# Set __main__ so that importing __main__ in the profiled code will
|
||||
# return the same namespace that the code is executing under.
|
||||
sys.modules['__main__'] = module
|
||||
# Ensure that we're using the same __dict__ instance as the module
|
||||
# for the global variables so that updates to globals are reflected
|
||||
# in the module's namespace.
|
||||
globs = module.__dict__
|
||||
globs.update({
|
||||
'__spec__': spec,
|
||||
'__file__': spec.origin,
|
||||
'__name__': spec.name,
|
||||
'__package__': None,
|
||||
'__cached__': None,
|
||||
})
|
||||
|
||||
try:
|
||||
runctx(code, globs, None, options.outfile, options.sort)
|
||||
except BrokenPipeError as exc:
|
||||
# Prevent "Exception ignored" during interpreter shutdown.
|
||||
sys.stdout = None
|
||||
sys.exit(exc.errno)
|
||||
else:
|
||||
parser.print_usage()
|
||||
return parser
|
||||
|
||||
# When invoked as main program, invoke the profiler on a script
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
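profiling.tracing.run() and runctx() either print a report or dump raw stats to a file (via the _Utils._show helper shown later in this diff). A hedged sketch of both paths, with demo.pstats as an illustrative filename:

    import pstats
    from profiling.tracing import run, runctx

    # Print a report to stdout, sorted by cumulative time.
    run("sum(i * i for i in range(10_000))", sort="cumulative")

    # Dump raw stats to a file instead, then inspect them with pstats.
    runctx("total = sum(range(n))", globals(), {"n": 1_000_000}, filename="demo.pstats")
    pstats.Stats("demo.pstats").strip_dirs().sort_stats("tottime").print_stats(10)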
Lib/profiling/tracing/__main__.py (new file, 6 lines)

@@ -0,0 +1,6 @@
+"""Run the tracing profiler from the command line."""
+
+from profiling.tracing import main
+
+if __name__ == '__main__':
+    main()
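Because __main__ simply re-exports main(), the tracing profiler is also runnable as a module. Illustrative invocations (script and module names are placeholders), matching the -o, -s, and -m options parsed by main() earlier in this diff:

    python -m profiling.tracing myscript.py
    python -m profiling.tracing -o out.pstats myscript.py
    python -m profiling.tracing -s cumulative myscript.py
    python -m profiling.tracing -m mypackage.cli arg1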
Lib/profiling/tracing/_utils.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+class _Utils:
+    """Support class for utility functions which are shared by
+    profile.py and cProfile.py modules.
+    Not supposed to be used directly.
+    """
+
+    def __init__(self, profiler):
+        self.profiler = profiler
+
+    def run(self, statement, filename, sort):
+        prof = self.profiler()
+        try:
+            prof.run(statement)
+        except SystemExit:
+            pass
+        finally:
+            self._show(prof, filename, sort)
+
+    def runctx(self, statement, globals, locals, filename, sort):
+        prof = self.profiler()
+        try:
+            prof.runctx(statement, globals, locals)
+        except SystemExit:
+            pass
+        finally:
+            self._show(prof, filename, sort)
+
+    def _show(self, prof, filename, sort):
+        if filename is not None:
+            prof.dump_stats(filename)
+        else:
+            prof.print_stats(sort)
@@ -4,12 +4,16 @@
 import pstats
 import unittest
 import os
+import warnings
 from difflib import unified_diff
 from io import StringIO
 from test.support.os_helper import TESTFN, unlink, temp_dir, change_cwd
 from contextlib import contextmanager, redirect_stdout

-import profile
+# Suppress deprecation warning for profile module (PEP 799)
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore", DeprecationWarning)
+    import profile
 from test.profilee import testfunc, timer
 from test.support.script_helper import assert_python_failure, assert_python_ok
Lib/test/test_profiling/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
+import os
+from test.support import load_package_tests
+
+def load_tests(*args):
+    return load_package_tests(os.path.dirname(__file__), *args)

Lib/test/test_profiling/__main__.py (new file, 4 lines)

@@ -0,0 +1,4 @@
+from . import load_tests
+import unittest
+
+unittest.main()
@@ -1,4 +1,4 @@
-"""Tests for the sampling profiler (profile.sample)."""
+"""Tests for the sampling profiler (profiling.sampling)."""

 import contextlib
 import io
|
|
@ -12,8 +12,8 @@
|
|||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
from profile.pstats_collector import PstatsCollector
|
||||
from profile.stack_collector import (
|
||||
from profiling.sampling.pstats_collector import PstatsCollector
|
||||
from profiling.sampling.stack_collector import (
|
||||
CollapsedStackCollector,
|
||||
)
|
||||
|
||||
|
|
@ -32,8 +32,8 @@
|
|||
"Test only runs when _remote_debugging is available"
|
||||
)
|
||||
else:
|
||||
import profile.sample
|
||||
from profile.sample import SampleProfiler
|
||||
import profiling.sampling
|
||||
from profiling.sampling.sample import SampleProfiler
|
||||
|
||||
|
||||
|
||||
|
|
@ -472,7 +472,7 @@ class TestSampleProfiler(unittest.TestCase):
|
|||
|
||||
def test_sample_profiler_initialization(self):
|
||||
"""Test SampleProfiler initialization with various parameters."""
|
||||
from profile.sample import SampleProfiler
|
||||
from profiling.sampling.sample import SampleProfiler
|
||||
|
||||
# Mock RemoteUnwinder to avoid permission issues
|
||||
with mock.patch(
|
||||
|
|
@ -498,7 +498,7 @@ def test_sample_profiler_initialization(self):
|
|||
|
||||
def test_sample_profiler_sample_method_timing(self):
|
||||
"""Test that the sample method respects duration and handles timing correctly."""
|
||||
from profile.sample import SampleProfiler
|
||||
from profiling.sampling.sample import SampleProfiler
|
||||
|
||||
# Mock the unwinder to avoid needing a real process
|
||||
mock_unwinder = mock.MagicMock()
|
||||
|
|
@ -548,7 +548,7 @@ def test_sample_profiler_sample_method_timing(self):
|
|||
|
||||
def test_sample_profiler_error_handling(self):
|
||||
"""Test that the sample method handles errors gracefully."""
|
||||
from profile.sample import SampleProfiler
|
||||
from profiling.sampling.sample import SampleProfiler
|
||||
|
||||
# Mock unwinder that raises errors
|
||||
mock_unwinder = mock.MagicMock()
|
||||
|
|
@ -612,7 +612,7 @@ def test_sample_profiler_error_handling(self):
|
|||
|
||||
def test_sample_profiler_missed_samples_warning(self):
|
||||
"""Test that the profiler warns about missed samples when sampling is too slow."""
|
||||
from profile.sample import SampleProfiler
|
||||
from profiling.sampling.sample import SampleProfiler
|
||||
|
||||
mock_unwinder = mock.MagicMock()
|
||||
mock_unwinder.get_stack_trace.return_value = [
|
||||
|
|
@ -698,7 +698,7 @@ def setUp(self):
|
|||
|
||||
def test_print_sampled_stats_basic(self):
|
||||
"""Test basic print_sampled_stats functionality."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Capture output
|
||||
with io.StringIO() as output:
|
||||
|
|
@ -720,7 +720,7 @@ def test_print_sampled_stats_basic(self):
|
|||
|
||||
def test_print_sampled_stats_sorting(self):
|
||||
"""Test different sorting options."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Test sort by calls
|
||||
with io.StringIO() as output:
|
||||
|
|
@ -753,7 +753,7 @@ def test_print_sampled_stats_sorting(self):
|
|||
|
||||
def test_print_sampled_stats_limit(self):
|
||||
"""Test limiting output rows."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
with io.StringIO() as output:
|
||||
with mock.patch("sys.stdout", output):
|
||||
|
|
@ -782,7 +782,7 @@ def test_print_sampled_stats_limit(self):
|
|||
|
||||
def test_print_sampled_stats_time_units(self):
|
||||
"""Test proper time unit selection."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
with io.StringIO() as output:
|
||||
with mock.patch("sys.stdout", output):
|
||||
|
|
@ -812,7 +812,7 @@ def test_print_sampled_stats_time_units(self):
|
|||
|
||||
def test_print_sampled_stats_summary(self):
|
||||
"""Test summary section generation."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
with io.StringIO() as output:
|
||||
with mock.patch("sys.stdout", output):
|
||||
|
|
@ -840,7 +840,7 @@ def test_print_sampled_stats_summary(self):
|
|||
|
||||
def test_print_sampled_stats_no_summary(self):
|
||||
"""Test disabling summary output."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
with io.StringIO() as output:
|
||||
with mock.patch("sys.stdout", output):
|
||||
|
|
@ -857,7 +857,7 @@ def test_print_sampled_stats_no_summary(self):
|
|||
|
||||
def test_print_sampled_stats_empty_stats(self):
|
||||
"""Test with empty stats."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
empty_stats = mock.MagicMock()
|
||||
empty_stats.stats = {}
|
||||
|
|
@ -873,7 +873,7 @@ def test_print_sampled_stats_empty_stats(self):
|
|||
|
||||
def test_print_sampled_stats_sample_percentage_sorting(self):
|
||||
"""Test sample percentage sorting options."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Add a function with high sample percentage (more direct calls than func3's 200)
|
||||
self.mock_stats.stats[("expensive.py", 60, "expensive_func")] = (
|
||||
|
|
@ -900,7 +900,7 @@ def test_print_sampled_stats_sample_percentage_sorting(self):
|
|||
|
||||
def test_print_sampled_stats_with_recursive_calls(self):
|
||||
"""Test print_sampled_stats with recursive calls where nc != cc."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Create stats with recursive calls (nc != cc)
|
||||
recursive_stats = mock.MagicMock()
|
||||
|
|
@ -936,7 +936,7 @@ def test_print_sampled_stats_with_recursive_calls(self):
|
|||
|
||||
def test_print_sampled_stats_with_zero_call_counts(self):
|
||||
"""Test print_sampled_stats with zero call counts to trigger division protection."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Create stats with zero call counts
|
||||
zero_stats = mock.MagicMock()
|
||||
|
|
@ -964,7 +964,7 @@ def test_print_sampled_stats_with_zero_call_counts(self):
|
|||
|
||||
def test_print_sampled_stats_sort_by_name(self):
|
||||
"""Test sort by function name option."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
with io.StringIO() as output:
|
||||
with mock.patch("sys.stdout", output):
|
||||
|
|
@ -1022,7 +1022,7 @@ def test_print_sampled_stats_sort_by_name(self):
|
|||
|
||||
def test_print_sampled_stats_with_zero_time_functions(self):
|
||||
"""Test summary sections with functions that have zero time."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Create stats with zero-time functions
|
||||
zero_time_stats = mock.MagicMock()
|
||||
|
|
@ -1060,7 +1060,7 @@ def test_print_sampled_stats_with_zero_time_functions(self):
|
|||
|
||||
def test_print_sampled_stats_with_malformed_qualified_names(self):
|
||||
"""Test summary generation with function names that don't contain colons."""
|
||||
from profile.sample import print_sampled_stats
|
||||
from profiling.sampling.sample import print_sampled_stats
|
||||
|
||||
# Create stats with function names that would create malformed qualified names
|
||||
malformed_stats = mock.MagicMock()
|
||||
|
|
@ -1451,7 +1451,7 @@ def test_sampling_basic_functionality(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
proc.pid,
|
||||
duration_sec=2,
|
||||
sample_interval_usec=1000, # 1ms
|
||||
|
|
@ -1483,7 +1483,7 @@ def test_sampling_with_pstats_export(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
proc.pid,
|
||||
duration_sec=1,
|
||||
filename=pstats_out.name,
|
||||
|
|
@ -1528,7 +1528,7 @@ def test_sampling_with_collapsed_export(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
proc.pid,
|
||||
duration_sec=1,
|
||||
filename=collapsed_file.name,
|
||||
|
|
@ -1576,7 +1576,7 @@ def test_sampling_all_threads(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
proc.pid,
|
||||
duration_sec=1,
|
||||
all_threads=True,
|
||||
|
|
@ -1595,7 +1595,7 @@ def test_sample_target_script(self):
|
|||
script_file.flush()
|
||||
self.addCleanup(close_and_unlink, script_file)
|
||||
|
||||
test_args = ["profile.sample", "-d", "1", script_file.name]
|
||||
test_args = ["profiling.sampling.sample", "-d", "1", script_file.name]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
|
|
@ -1603,7 +1603,7 @@ def test_sample_target_script(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
except PermissionError:
|
||||
self.skipTest("Insufficient permissions for remote profiling")
|
||||
|
||||
|
|
@ -1627,7 +1627,7 @@ def test_sample_target_module(self):
|
|||
with open(module_path, "w") as f:
|
||||
f.write(self.test_script)
|
||||
|
||||
test_args = ["profile.sample", "-d", "1", "-m", "test_module"]
|
||||
test_args = ["profiling.sampling.sample", "-d", "1", "-m", "test_module"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
|
|
@ -1637,7 +1637,7 @@ def test_sample_target_module(self):
|
|||
contextlib.chdir(tempdir.name),
|
||||
):
|
||||
try:
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
except PermissionError:
|
||||
self.skipTest("Insufficient permissions for remote profiling")
|
||||
|
||||
|
|
@ -1660,7 +1660,7 @@ def test_sample_target_module(self):
|
|||
class TestSampleProfilerErrorHandling(unittest.TestCase):
|
||||
def test_invalid_pid(self):
|
||||
with self.assertRaises((OSError, RuntimeError)):
|
||||
profile.sample.sample(-1, duration_sec=1)
|
||||
profiling.sampling.sample.sample(-1, duration_sec=1)
|
||||
|
||||
def test_process_dies_during_sampling(self):
|
||||
with test_subprocess("import time; time.sleep(0.5); exit()") as proc:
|
||||
|
|
@ -1669,7 +1669,7 @@ def test_process_dies_during_sampling(self):
|
|||
mock.patch("sys.stdout", captured_output),
|
||||
):
|
||||
try:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
proc.pid,
|
||||
duration_sec=2, # Longer than process lifetime
|
||||
sample_interval_usec=50000,
|
||||
|
|
@ -1685,7 +1685,7 @@ def test_process_dies_during_sampling(self):
|
|||
|
||||
def test_invalid_output_format(self):
|
||||
with self.assertRaises(ValueError):
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
os.getpid(),
|
||||
duration_sec=1,
|
||||
output_format="invalid_format",
|
||||
|
|
@ -1694,13 +1694,13 @@ def test_invalid_output_format(self):
|
|||
def test_invalid_output_format_with_mocked_profiler(self):
|
||||
"""Test invalid output format with proper mocking to avoid permission issues."""
|
||||
with mock.patch(
|
||||
"profile.sample.SampleProfiler"
|
||||
"profiling.sampling.sample.SampleProfiler"
|
||||
) as mock_profiler_class:
|
||||
mock_profiler = mock.MagicMock()
|
||||
mock_profiler_class.return_value = mock_profiler
|
||||
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
profile.sample.sample(
|
||||
profiling.sampling.sample.sample(
|
||||
12345,
|
||||
duration_sec=1,
|
||||
output_format="unknown_format",
|
||||
|
|
@ -1787,23 +1787,23 @@ def _verify_coordinator_command(self, mock_popen, expected_target_args):
|
|||
coordinator_cmd = args[0]
|
||||
self.assertEqual(coordinator_cmd[0], sys.executable)
|
||||
self.assertEqual(coordinator_cmd[1], "-m")
|
||||
self.assertEqual(coordinator_cmd[2], "profile._sync_coordinator")
|
||||
self.assertEqual(coordinator_cmd[2], "profiling.sampling._sync_coordinator")
|
||||
self.assertEqual(coordinator_cmd[3], "12345") # port
|
||||
# cwd is coordinator_cmd[4]
|
||||
self.assertEqual(coordinator_cmd[5:], expected_target_args)
|
||||
|
||||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_module_argument_parsing(self):
|
||||
test_args = ["profile.sample", "-m", "mymodule"]
|
||||
test_args = ["profiling.sampling.sample", "-m", "mymodule"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("-m", "mymodule"))
|
||||
mock_sample.assert_called_once_with(
|
||||
|
|
@ -1821,16 +1821,16 @@ def test_cli_module_argument_parsing(self):
|
|||
|
||||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_module_with_arguments(self):
|
||||
test_args = ["profile.sample", "-m", "mymodule", "arg1", "arg2", "--flag"]
|
||||
test_args = ["profiling.sampling.sample", "-m", "mymodule", "arg1", "arg2", "--flag"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("-m", "mymodule", "arg1", "arg2", "--flag"))
|
||||
mock_sample.assert_called_once_with(
|
||||
|
|
@ -1848,16 +1848,16 @@ def test_cli_module_with_arguments(self):
|
|||
|
||||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_script_argument_parsing(self):
|
||||
test_args = ["profile.sample", "myscript.py"]
|
||||
test_args = ["profiling.sampling.sample", "myscript.py"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("myscript.py",))
|
||||
mock_sample.assert_called_once_with(
|
||||
|
|
@ -1875,11 +1875,11 @@ def test_cli_script_argument_parsing(self):
|
|||
|
||||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_script_with_arguments(self):
|
||||
test_args = ["profile.sample", "myscript.py", "arg1", "arg2", "--flag"]
|
||||
test_args = ["profiling.sampling.sample", "myscript.py", "arg1", "arg2", "--flag"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
|
|
@ -1888,55 +1888,55 @@ def test_cli_script_with_arguments(self):
|
|||
# Override specific behavior for this test
|
||||
mock_process.wait.side_effect = [subprocess.TimeoutExpired(test_args, 0.1), None]
|
||||
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
# Verify the coordinator command was called
|
||||
args, kwargs = mock_popen.call_args
|
||||
coordinator_cmd = args[0]
|
||||
self.assertEqual(coordinator_cmd[0], sys.executable)
|
||||
self.assertEqual(coordinator_cmd[1], "-m")
|
||||
self.assertEqual(coordinator_cmd[2], "profile._sync_coordinator")
|
||||
self.assertEqual(coordinator_cmd[2], "profiling.sampling._sync_coordinator")
|
||||
self.assertEqual(coordinator_cmd[3], "12345") # port
|
||||
# cwd is coordinator_cmd[4]
|
||||
self.assertEqual(coordinator_cmd[5:], ("myscript.py", "arg1", "arg2", "--flag"))
|
||||
|
||||
def test_cli_mutually_exclusive_pid_module(self):
|
||||
test_args = ["profile.sample", "-p", "12345", "-m", "mymodule"]
|
||||
test_args = ["profiling.sampling.sample", "-p", "12345", "-m", "mymodule"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("sys.stderr", io.StringIO()) as mock_stderr,
|
||||
self.assertRaises(SystemExit) as cm,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self.assertEqual(cm.exception.code, 2) # argparse error
|
||||
error_msg = mock_stderr.getvalue()
|
||||
self.assertIn("not allowed with argument", error_msg)
|
||||
|
||||
def test_cli_mutually_exclusive_pid_script(self):
|
||||
test_args = ["profile.sample", "-p", "12345", "myscript.py"]
|
||||
test_args = ["profiling.sampling.sample", "-p", "12345", "myscript.py"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("sys.stderr", io.StringIO()) as mock_stderr,
|
||||
self.assertRaises(SystemExit) as cm,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self.assertEqual(cm.exception.code, 2) # argparse error
|
||||
error_msg = mock_stderr.getvalue()
|
||||
self.assertIn("only one target type can be specified", error_msg)
|
||||
|
||||
def test_cli_no_target_specified(self):
|
||||
test_args = ["profile.sample", "-d", "5"]
|
||||
test_args = ["profiling.sampling.sample", "-d", "5"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("sys.stderr", io.StringIO()) as mock_stderr,
|
||||
self.assertRaises(SystemExit) as cm,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self.assertEqual(cm.exception.code, 2) # argparse error
|
||||
error_msg = mock_stderr.getvalue()
|
||||
|
|
@ -1945,18 +1945,18 @@ def test_cli_no_target_specified(self):
|
|||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_module_with_profiler_options(self):
|
||||
test_args = [
|
||||
"profile.sample", "-i", "1000", "-d", "30", "-a",
|
||||
"profiling.sampling.sample", "-i", "1000", "-d", "30", "-a",
|
||||
"--sort-tottime", "-l", "20", "-m", "mymodule",
|
||||
]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("-m", "mymodule"))
|
||||
mock_sample.assert_called_once_with(
|
||||
|
|
@ -1976,19 +1976,19 @@ def test_cli_module_with_profiler_options(self):
|
|||
def test_cli_script_with_profiler_options(self):
|
||||
"""Test script with various profiler options."""
|
||||
test_args = [
|
||||
"profile.sample", "-i", "2000", "-d", "60",
|
||||
"profiling.sampling.sample", "-i", "2000", "-d", "60",
|
||||
"--collapsed", "-o", "output.txt",
|
||||
"myscript.py", "scriptarg",
|
||||
]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("myscript.py", "scriptarg"))
|
||||
# Verify profiler options were passed correctly
|
||||
|
|
@ -2006,14 +2006,14 @@ def test_cli_script_with_profiler_options(self):
|
|||
)
|
||||
|
||||
def test_cli_empty_module_name(self):
|
||||
test_args = ["profile.sample", "-m"]
|
||||
test_args = ["profiling.sampling.sample", "-m"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("sys.stderr", io.StringIO()) as mock_stderr,
|
||||
self.assertRaises(SystemExit) as cm,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self.assertEqual(cm.exception.code, 2) # argparse error
|
||||
error_msg = mock_stderr.getvalue()
|
||||
|
|
@ -2021,29 +2021,29 @@ def test_cli_empty_module_name(self):
|
|||
|
||||
@unittest.skipIf(is_emscripten, "socket.SO_REUSEADDR does not exist")
|
||||
def test_cli_long_module_option(self):
|
||||
test_args = ["profile.sample", "--module", "mymodule", "arg1"]
|
||||
test_args = ["profiling.sampling.sample", "--module", "mymodule", "arg1"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("subprocess.Popen") as mock_popen,
|
||||
mock.patch("socket.socket") as mock_socket,
|
||||
):
|
||||
self._setup_sync_mocks(mock_socket, mock_popen)
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self._verify_coordinator_command(mock_popen, ("-m", "mymodule", "arg1"))
|
||||
|
||||
def test_cli_complex_script_arguments(self):
|
||||
test_args = [
|
||||
"profile.sample", "script.py",
|
||||
"profiling.sampling.sample", "script.py",
|
||||
"--input", "file.txt", "-v", "--output=/tmp/out", "positional"
|
||||
]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profile.sample._run_with_sync") as mock_run_with_sync,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample._run_with_sync") as mock_run_with_sync,
|
||||
):
|
||||
mock_process = mock.MagicMock()
|
||||
mock_process.pid = 12345
|
||||
|
|
@ -2051,7 +2051,7 @@ def test_cli_complex_script_arguments(self):
|
|||
mock_process.poll.return_value = None
|
||||
mock_run_with_sync.return_value = mock_process
|
||||
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
mock_run_with_sync.assert_called_once_with((
|
||||
sys.executable, "script.py",
|
||||
|
|
@ -2063,16 +2063,16 @@ def test_cli_collapsed_format_validation(self):
|
|||
test_cases = [
|
||||
# Test sort options are invalid with collapsed
|
||||
(
|
||||
["profile.sample", "--collapsed", "--sort-nsamples", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "--sort-nsamples", "-p", "12345"],
|
||||
"sort",
|
||||
),
|
||||
(
|
||||
["profile.sample", "--collapsed", "--sort-tottime", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "--sort-tottime", "-p", "12345"],
|
||||
"sort",
|
||||
),
|
||||
(
|
||||
[
|
||||
"profile.sample",
|
||||
"profiling.sampling.sample",
|
||||
"--collapsed",
|
||||
"--sort-cumtime",
|
||||
"-p",
|
||||
|
|
@ -2082,7 +2082,7 @@ def test_cli_collapsed_format_validation(self):
|
|||
),
|
||||
(
|
||||
[
|
||||
"profile.sample",
|
||||
"profiling.sampling.sample",
|
||||
"--collapsed",
|
||||
"--sort-sample-pct",
|
||||
"-p",
|
||||
|
|
@ -2092,7 +2092,7 @@ def test_cli_collapsed_format_validation(self):
|
|||
),
|
||||
(
|
||||
[
|
||||
"profile.sample",
|
||||
"profiling.sampling.sample",
|
||||
"--collapsed",
|
||||
"--sort-cumul-pct",
|
||||
"-p",
|
||||
|
|
@ -2101,18 +2101,18 @@ def test_cli_collapsed_format_validation(self):
|
|||
"sort",
|
||||
),
|
||||
(
|
||||
["profile.sample", "--collapsed", "--sort-name", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "--sort-name", "-p", "12345"],
|
||||
"sort",
|
||||
),
|
||||
# Test limit option is invalid with collapsed
|
||||
(["profile.sample", "--collapsed", "-l", "20", "-p", "12345"], "limit"),
|
||||
(["profiling.sampling.sample", "--collapsed", "-l", "20", "-p", "12345"], "limit"),
|
||||
(
|
||||
["profile.sample", "--collapsed", "--limit", "20", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "--limit", "20", "-p", "12345"],
|
||||
"limit",
|
||||
),
|
||||
# Test no-summary option is invalid with collapsed
|
||||
(
|
||||
["profile.sample", "--collapsed", "--no-summary", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "--no-summary", "-p", "12345"],
|
||||
"summary",
|
||||
),
|
||||
]
|
||||
|
|
@ -2123,7 +2123,7 @@ def test_cli_collapsed_format_validation(self):
|
|||
mock.patch("sys.stderr", io.StringIO()) as mock_stderr,
|
||||
self.assertRaises(SystemExit) as cm,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
self.assertEqual(cm.exception.code, 2) # argparse error code
|
||||
error_msg = mock_stderr.getvalue()
|
||||
|
|
@ -2132,13 +2132,13 @@ def test_cli_collapsed_format_validation(self):
|
|||
|
||||
def test_cli_default_collapsed_filename(self):
|
||||
"""Test that collapsed format gets a default filename when not specified."""
|
||||
test_args = ["profile.sample", "--collapsed", "-p", "12345"]
|
||||
test_args = ["profiling.sampling.sample", "--collapsed", "-p", "12345"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
# Check that filename was set to default collapsed format
|
||||
mock_sample.assert_called_once()
|
||||
|
|
@ -2150,12 +2150,12 @@ def test_cli_custom_output_filenames(self):
|
|||
"""Test custom output filenames for both formats."""
|
||||
test_cases = [
|
||||
(
|
||||
["profile.sample", "--pstats", "-o", "custom.pstats", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--pstats", "-o", "custom.pstats", "-p", "12345"],
|
||||
"custom.pstats",
|
||||
"pstats",
|
||||
),
|
||||
(
|
||||
["profile.sample", "--collapsed", "-o", "custom.txt", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--collapsed", "-o", "custom.txt", "-p", "12345"],
|
||||
"custom.txt",
|
||||
"collapsed",
|
||||
),
|
||||
|
|
@ -2164,9 +2164,9 @@ def test_cli_custom_output_filenames(self):
|
|||
for test_args, expected_filename, expected_format in test_cases:
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
mock_sample.assert_called_once()
|
||||
call_args = mock_sample.call_args[1]
|
||||
|
|
@ -2176,32 +2176,32 @@ def test_cli_custom_output_filenames(self):
|
|||
def test_cli_missing_required_arguments(self):
|
||||
"""Test that CLI requires PID argument."""
|
||||
with (
|
||||
mock.patch("sys.argv", ["profile.sample"]),
|
||||
mock.patch("sys.argv", ["profiling.sampling.sample"]),
|
||||
mock.patch("sys.stderr", io.StringIO()),
|
||||
):
|
||||
with self.assertRaises(SystemExit):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
def test_cli_mutually_exclusive_format_options(self):
|
||||
"""Test that pstats and collapsed options are mutually exclusive."""
|
||||
with (
|
||||
mock.patch(
|
||||
"sys.argv",
|
||||
["profile.sample", "--pstats", "--collapsed", "-p", "12345"],
|
||||
["profiling.sampling.sample", "--pstats", "--collapsed", "-p", "12345"],
|
||||
),
|
||||
mock.patch("sys.stderr", io.StringIO()),
|
||||
):
|
||||
with self.assertRaises(SystemExit):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
def test_argument_parsing_basic(self):
|
||||
test_args = ["profile.sample", "-p", "12345"]
|
||||
test_args = ["profiling.sampling.sample", "-p", "12345"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
mock_sample.assert_called_once_with(
|
||||
12345,
|
||||
|
|
@ -2227,13 +2227,13 @@ def test_sort_options(self):
|
|||
]
|
||||
|
||||
for option, expected_sort_value in sort_options:
|
||||
test_args = ["profile.sample", option, "-p", "12345"]
|
||||
test_args = ["profiling.sampling.sample", option, "-p", "12345"]
|
||||
|
||||
with (
|
||||
mock.patch("sys.argv", test_args),
|
||||
mock.patch("profile.sample.sample") as mock_sample,
|
||||
mock.patch("profiling.sampling.sample.sample") as mock_sample,
|
||||
):
|
||||
profile.sample.main()
|
||||
profiling.sampling.sample.main()
|
||||
|
||||
mock_sample.assert_called_once()
|
||||
call_args = mock_sample.call_args[1]
|
||||
|
@@ -4,7 +4,7 @@
 import unittest

 # rip off all interesting stuff from test_profile
-import cProfile
+import profiling.tracing as cProfile
 import tempfile
 import textwrap
 from test.test_profile import ProfileTest, regenerate_expected_output
@@ -2565,6 +2565,7 @@ LIBSUBDIRS= asyncio \
 		multiprocessing multiprocessing/dummy \
 		pathlib \
 		profile \
+		profiling profiling/sampling profiling/tracing \
 		pydoc_data \
 		re \
 		site-packages \

@@ -2677,6 +2678,7 @@ TESTSUBDIRS= idlelib/idle_test \
 		test/test_pathlib \
 		test/test_pathlib/support \
 		test/test_peg_generator \
+		test/test_profiling \
 		test/test_pydoc \
 		test/test_pyrepl \
 		test/test_string \
@@ -0,0 +1,2 @@
+Implement :pep:`799` -- A dedicated profiling package for organizing Python
+profiling tools. Patch by Pablo Galindo.
Python/stdlib_module_names.h (generated, 1 line changed)

@@ -215,6 +215,7 @@ static const char* _Py_stdlib_module_names[] = {
 "posixpath",
 "pprint",
 "profile",
+"profiling",
 "pstats",
 "pty",
 "pwd",