Merge branch 'master' into feature/34632-importlib-metadata

Author: Jason R. Coombs
Date: 2019-05-08 11:34:20 -04:00
Commit: 175603f3aa

706 changed files with 23,101 additions and 14,313 deletions


@ -5631,8 +5631,6 @@ def __init__(self, value=None):
def __repr__(self):
return "(%r, %r, %r)" % (self.sign, self.int, self.exp)
__str__ = __repr__
def _normalize(op1, op2, prec = 0):


@ -292,16 +292,15 @@ class IOBase(metaclass=abc.ABCMeta):
derived classes can override selectively; the default implementations
represent a file that cannot be read, written or seeked.
Even though IOBase does not declare read, readinto, or write because
Even though IOBase does not declare read or write because
their signatures will vary, implementations and clients should
consider those methods part of the interface. Also, implementations
may raise UnsupportedOperation when operations they do not support are
called.
The basic type used for binary data read from or written to a file is
bytes. Other bytes-like objects are accepted as method arguments too. In
some cases (such as readinto), a writable object is required. Text I/O
classes work with str data.
bytes. Other bytes-like objects are accepted as method arguments too.
Text I/O classes work with str data.
Note that calling any method (even inquiries) on a closed stream is
undefined. Implementations may raise OSError in this case.
@ -552,6 +551,11 @@ def readlines(self, hint=None):
return lines
def writelines(self, lines):
"""Write a list of lines to the stream.
Line separators are not added, so it is usual for each of the lines
provided to have a line separator at the end.
"""
self._checkClosed()
for line in lines:
self.write(line)
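
A quick illustration of the documented contract (plain in-memory I/O, separate from this diff): writelines() adds no separators, so the caller supplies them.

import io

buf = io.StringIO()
buf.writelines(line + "\n" for line in ("alpha", "beta"))
assert buf.getvalue() == "alpha\nbeta\n"
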
@ -1763,8 +1767,7 @@ class TextIOBase(IOBase):
"""Base class for text I/O.
This class provides a character and line based interface to stream
I/O. There is no readinto method because Python's character strings
are immutable. There is no public constructor.
I/O. There is no public constructor.
"""
def read(self, size=-1):


@ -16,10 +16,12 @@
import collections
import collections.abc
import concurrent.futures
import functools
import heapq
import itertools
import os
import socket
import stat
import subprocess
import threading
import time
@ -40,6 +42,7 @@
from . import futures
from . import protocols
from . import sslproto
from . import staggered
from . import tasks
from . import transports
from .log import logger
@ -158,6 +161,28 @@ def _ipaddr_info(host, port, family, type, proto):
return None
def _interleave_addrinfos(addrinfos, first_address_family_count=1):
"""Interleave list of addrinfo tuples by family."""
# Group addresses by family
addrinfos_by_family = collections.OrderedDict()
for addr in addrinfos:
family = addr[0]
if family not in addrinfos_by_family:
addrinfos_by_family[family] = []
addrinfos_by_family[family].append(addr)
addrinfos_lists = list(addrinfos_by_family.values())
reordered = []
if first_address_family_count > 1:
reordered.extend(addrinfos_lists[0][:first_address_family_count - 1])
del addrinfos_lists[0][:first_address_family_count - 1]
reordered.extend(
a for a in itertools.chain.from_iterable(
itertools.zip_longest(*addrinfos_lists)
) if a is not None)
return reordered
def _run_until_complete_cb(fut):
if not fut.cancelled():
exc = fut.exception()
@ -870,12 +895,49 @@ def _check_sendfile_params(self, sock, file, offset, count):
"offset must be a non-negative integer (got {!r})".format(
offset))
async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None):
"""Create, bind and connect one socket."""
my_exceptions = []
exceptions.append(my_exceptions)
family, type_, proto, _, address = addr_info
sock = None
try:
sock = socket.socket(family=family, type=type_, proto=proto)
sock.setblocking(False)
if local_addr_infos is not None:
for _, _, _, _, laddr in local_addr_infos:
try:
sock.bind(laddr)
break
except OSError as exc:
msg = (
f'error while attempting to bind on '
f'address {laddr!r}: '
f'{exc.strerror.lower()}'
)
exc = OSError(exc.errno, msg)
my_exceptions.append(exc)
else: # all bind attempts failed
raise my_exceptions.pop()
await self.sock_connect(sock, address)
return sock
except OSError as exc:
my_exceptions.append(exc)
if sock is not None:
sock.close()
raise
except:
if sock is not None:
sock.close()
raise
async def create_connection(
self, protocol_factory, host=None, port=None,
*, ssl=None, family=0,
proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None,
ssl_handshake_timeout=None):
ssl_handshake_timeout=None,
happy_eyeballs_delay=None, interleave=None):
"""Connect to a TCP server.
Create a streaming transport connection to a given Internet host and
@ -910,6 +972,10 @@ async def create_connection(
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
if happy_eyeballs_delay is not None and interleave is None:
# If using happy eyeballs, default to interleaving addresses by family
interleave = 1
if host is not None or port is not None:
if sock is not None:
raise ValueError(
@ -928,43 +994,31 @@ async def create_connection(
flags=flags, loop=self)
if not laddr_infos:
raise OSError('getaddrinfo() returned empty list')
else:
laddr_infos = None
if interleave:
infos = _interleave_addrinfos(infos, interleave)
exceptions = []
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
if local_addr is not None:
for _, _, _, _, laddr in laddr_infos:
try:
sock.bind(laddr)
break
except OSError as exc:
msg = (
f'error while attempting to bind on '
f'address {laddr!r}: '
f'{exc.strerror.lower()}'
)
exc = OSError(exc.errno, msg)
exceptions.append(exc)
else:
sock.close()
sock = None
continue
if self._debug:
logger.debug("connect %r to %r", sock, address)
await self.sock_connect(sock, address)
except OSError as exc:
if sock is not None:
sock.close()
exceptions.append(exc)
except:
if sock is not None:
sock.close()
raise
else:
break
else:
if happy_eyeballs_delay is None:
# not using happy eyeballs
for addrinfo in infos:
try:
sock = await self._connect_sock(
exceptions, addrinfo, laddr_infos)
break
except OSError:
continue
else: # using happy eyeballs
sock, _, _ = await staggered.staggered_race(
(functools.partial(self._connect_sock,
exceptions, addrinfo, laddr_infos)
for addrinfo in infos),
happy_eyeballs_delay, loop=self)
if sock is None:
exceptions = [exc for sub in exceptions for exc in sub]
if len(exceptions) == 1:
raise exceptions[0]
else:
@ -1183,6 +1237,19 @@ async def create_datagram_endpoint(self, protocol_factory,
for addr in (local_addr, remote_addr):
if addr is not None and not isinstance(addr, str):
raise TypeError('string is expected')
if local_addr and local_addr[0] not in (0, '\x00'):
try:
if stat.S_ISSOCK(os.stat(local_addr).st_mode):
os.remove(local_addr)
except FileNotFoundError:
pass
except OSError as err:
# Directory may have permissions only to create socket.
logger.error('Unable to check or remove stale UNIX '
'socket %r: %r',
local_addr, err)
addr_pairs_info = (((family, proto),
(local_addr, remote_addr)), )
else:
@ -1239,7 +1306,8 @@ async def create_datagram_endpoint(self, protocol_factory,
if local_addr:
sock.bind(local_address)
if remote_addr:
await self.sock_connect(sock, remote_address)
if not allow_broadcast:
await self.sock_connect(sock, remote_address)
r_addr = remote_address
except OSError as exc:
if sock is not None:
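
Taken together, these hunks add Happy Eyeballs (RFC 8305) support to create_connection(). A minimal usage sketch; asyncio.open_connection() forwards extra keyword arguments to loop.create_connection(), and 0.25 s is the connection-attempt delay RFC 8305 recommends:

import asyncio

async def main():
    # happy_eyeballs_delay staggers connection attempts; when it is given,
    # interleave defaults to 1, so address families alternate in the
    # attempt order produced by _interleave_addrinfos().
    reader, writer = await asyncio.open_connection(
        "example.com", 443, ssl=True, happy_eyeballs_delay=0.25)
    writer.close()
    await writer.wait_closed()

asyncio.run(main())
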


@ -298,7 +298,8 @@ async def create_connection(
*, ssl=None, family=0, proto=0,
flags=0, sock=None, local_addr=None,
server_hostname=None,
ssl_handshake_timeout=None):
ssl_handshake_timeout=None,
happy_eyeballs_delay=None, interleave=None):
raise NotImplementedError
async def create_server(


@ -587,7 +587,10 @@ class _SelectorTransport(transports._FlowControlMixin,
def __init__(self, loop, sock, protocol, extra=None, server=None):
super().__init__(extra, loop)
self._extra['socket'] = sock
self._extra['sockname'] = sock.getsockname()
try:
self._extra['sockname'] = sock.getsockname()
except OSError:
self._extra['sockname'] = None
if 'peername' not in self._extra:
try:
self._extra['peername'] = sock.getpeername()
@ -976,9 +979,11 @@ def sendto(self, data, addr=None):
if not data:
return
if self._address and addr not in (None, self._address):
raise ValueError(
f'Invalid address: must be None or {self._address}')
if self._address:
if addr not in (None, self._address):
raise ValueError(
f'Invalid address: must be None or {self._address}')
addr = self._address
if self._conn_lost and self._address:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
@ -989,7 +994,7 @@ def sendto(self, data, addr=None):
if not self._buffer:
# Attempt to send it right away first.
try:
if self._address:
if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
@ -1012,7 +1017,7 @@ def _sendto_ready(self):
while self._buffer:
data, addr = self._buffer.popleft()
try:
if self._address:
if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)

Lib/asyncio/staggered.py (new file, 147 lines)

@ -0,0 +1,147 @@
"""Support for running coroutines in parallel with staggered start times."""
__all__ = 'staggered_race',
import contextlib
import typing
from . import events
from . import futures
from . import locks
from . import tasks
async def staggered_race(
coro_fns: typing.Iterable[typing.Callable[[], typing.Awaitable]],
delay: typing.Optional[float],
*,
loop: events.AbstractEventLoop = None,
) -> typing.Tuple[
typing.Any,
typing.Optional[int],
typing.List[typing.Optional[Exception]]
]:
"""Run coroutines with staggered start times and take the first to finish.
This method takes an iterable of coroutine functions. The first one is
started immediately. From then on, whenever the immediately preceding one
fails (raises an exception), or when *delay* seconds have passed, the next
coroutine is started. This continues until one of the coroutines completes
successfully, in which case all others are cancelled, or until all
coroutines fail.
The coroutines provided should be well-behaved in the following way:
* They should only ``return`` if completed successfully.
* They should always raise an exception if they did not complete
successfully. In particular, if they handle cancellation, they should
probably reraise, like this::
try:
# do work
except asyncio.CancelledError:
# undo partially completed work
raise
Args:
coro_fns: an iterable of coroutine functions, i.e. callables that
return a coroutine object when called. Use ``functools.partial`` or
lambdas to pass arguments.
delay: amount of time, in seconds, between starting coroutines. If
``None``, the coroutines will run sequentially.
loop: the event loop to use.
Returns:
tuple *(winner_result, winner_index, exceptions)* where
- *winner_result*: the result of the winning coroutine, or ``None``
if no coroutines won.
- *winner_index*: the index of the winning coroutine in
``coro_fns``, or ``None`` if no coroutines won. If the winning
coroutine may return None on success, *winner_index* can be used
to definitively determine whether any coroutine won.
- *exceptions*: list of exceptions returned by the coroutines.
``len(exceptions)`` is equal to the number of coroutines actually
started, and the order is the same as in ``coro_fns``. The winning
coroutine's entry is ``None``.
"""
# TODO: when we have aiter() and anext(), allow async iterables in coro_fns.
loop = loop or events.get_running_loop()
enum_coro_fns = enumerate(coro_fns)
winner_result = None
winner_index = None
exceptions = []
running_tasks = []
async def run_one_coro(
previous_failed: typing.Optional[locks.Event]) -> None:
# Wait for the previous task to finish, or for delay seconds
if previous_failed is not None:
with contextlib.suppress(futures.TimeoutError):
# Use asyncio.wait_for() instead of asyncio.wait() here, so
# that if we get cancelled at this point, Event.wait() is also
# cancelled, otherwise there will be a "Task was destroyed but it is
# pending!" warning later.
await tasks.wait_for(previous_failed.wait(), delay)
# Get the next coroutine to run
try:
this_index, coro_fn = next(enum_coro_fns)
except StopIteration:
return
# Start task that will run the next coroutine
this_failed = locks.Event()
next_task = loop.create_task(run_one_coro(this_failed))
running_tasks.append(next_task)
assert len(running_tasks) == this_index + 2
# Prepare place to put this coroutine's exceptions if not won
exceptions.append(None)
assert len(exceptions) == this_index + 1
try:
result = await coro_fn()
except Exception as e:
exceptions[this_index] = e
this_failed.set() # Kickstart the next coroutine
else:
# Store winner's results
nonlocal winner_index, winner_result
assert winner_index is None
winner_index = this_index
winner_result = result
# Cancel all other tasks. We take care to not cancel the current
# task as well. If we do so, then since there is no `await` after
# this point and a CancelledError is usually raised at an `await`,
# we will encounter a curious corner case where the current task will end
# up as done() == True, cancelled() == False, exception() ==
# asyncio.CancelledError. This behavior is specified in
# https://bugs.python.org/issue30048
for i, t in enumerate(running_tasks):
if i != this_index:
t.cancel()
first_task = loop.create_task(run_one_coro(None))
running_tasks.append(first_task)
try:
# Wait for a growing list of tasks to all finish: poor man's version of
# curio's TaskGroup or trio's nursery
done_count = 0
while done_count != len(running_tasks):
done, _ = await tasks.wait(running_tasks)
done_count = len(done)
# If run_one_coro raises an unhandled exception, it's probably a
# programming error, and I want to see it.
if __debug__:
for d in done:
if d.done() and not d.cancelled() and d.exception():
raise d.exception()
return winner_result, winner_index, exceptions
finally:
# Make sure no tasks are left running if we leave this function
for t in running_tasks:
t.cancel()
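
A usage sketch for the new helper; staggered.py is an undocumented internal module, and the TEST-NET endpoints below are illustrative (they will simply fail):

import asyncio
import functools
from asyncio.staggered import staggered_race

async def main():
    # Start the next attempt 0.3 s after the previous one, or immediately
    # once it fails; the first attempt to succeed wins.
    endpoints = [("192.0.2.1", 80), ("198.51.100.1", 80)]
    winner, index, excs = await staggered_race(
        (functools.partial(asyncio.open_connection, host, port)
         for host, port in endpoints),
        delay=0.3, loop=asyncio.get_running_loop())
    if index is None:
        print("all attempts failed:", excs)
    else:
        reader, writer = winner
        writer.close()

asyncio.run(main())
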


@ -4,6 +4,7 @@
import socket
import sys
import warnings
import weakref
if hasattr(socket, 'AF_UNIX'):
@ -42,11 +43,14 @@ async def open_connection(host=None, port=None, *,
"""
if loop is None:
loop = events.get_event_loop()
reader = StreamReader(limit=limit, loop=loop)
protocol = StreamReaderProtocol(reader, loop=loop)
reader = StreamReader(limit=limit, loop=loop,
_asyncio_internal=True)
protocol = StreamReaderProtocol(reader, loop=loop,
_asyncio_internal=True)
transport, _ = await loop.create_connection(
lambda: protocol, host, port, **kwds)
writer = StreamWriter(transport, protocol, reader, loop)
writer = StreamWriter(transport, protocol, reader, loop,
_asyncio_internal=True)
return reader, writer
@ -77,9 +81,11 @@ async def start_server(client_connected_cb, host=None, port=None, *,
loop = events.get_event_loop()
def factory():
reader = StreamReader(limit=limit, loop=loop)
reader = StreamReader(limit=limit, loop=loop,
_asyncio_internal=True)
protocol = StreamReaderProtocol(reader, client_connected_cb,
loop=loop)
loop=loop,
_asyncio_internal=True)
return protocol
return await loop.create_server(factory, host, port, **kwds)
@ -93,11 +99,14 @@ async def open_unix_connection(path=None, *,
"""Similar to `open_connection` but works with UNIX Domain Sockets."""
if loop is None:
loop = events.get_event_loop()
reader = StreamReader(limit=limit, loop=loop)
protocol = StreamReaderProtocol(reader, loop=loop)
reader = StreamReader(limit=limit, loop=loop,
_asyncio_internal=True)
protocol = StreamReaderProtocol(reader, loop=loop,
_asyncio_internal=True)
transport, _ = await loop.create_unix_connection(
lambda: protocol, path, **kwds)
writer = StreamWriter(transport, protocol, reader, loop)
writer = StreamWriter(transport, protocol, reader, loop,
_asyncio_internal=True)
return reader, writer
async def start_unix_server(client_connected_cb, path=None, *,
@ -107,9 +116,11 @@ async def start_unix_server(client_connected_cb, path=None, *,
loop = events.get_event_loop()
def factory():
reader = StreamReader(limit=limit, loop=loop)
reader = StreamReader(limit=limit, loop=loop,
_asyncio_internal=True)
protocol = StreamReaderProtocol(reader, client_connected_cb,
loop=loop)
loop=loop,
_asyncio_internal=True)
return protocol
return await loop.create_unix_server(factory, path, **kwds)
@ -125,11 +136,20 @@ class FlowControlMixin(protocols.Protocol):
StreamWriter.drain() must wait for _drain_helper() coroutine.
"""
def __init__(self, loop=None):
def __init__(self, loop=None, *, _asyncio_internal=False):
if loop is None:
self._loop = events.get_event_loop()
else:
self._loop = loop
if not _asyncio_internal:
# NOTE:
# Avoid inheritance from FlowControlMixin
# Copy-paste the code to your project
# if you need flow control helpers
warnings.warn(f"{self.__class__} should be instaniated "
"by asyncio internals only, "
"please avoid its creation from user code",
DeprecationWarning)
self._paused = False
self._drain_waiter = None
self._connection_lost = False
@ -179,6 +199,9 @@ async def _drain_helper(self):
self._drain_waiter = waiter
await waiter
def _get_close_waiter(self, stream):
raise NotImplementedError
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
"""Helper class to adapt between Protocol and StreamReader.
@ -191,8 +214,9 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
_source_traceback = None
def __init__(self, stream_reader, client_connected_cb=None, loop=None):
super().__init__(loop=loop)
def __init__(self, stream_reader, client_connected_cb=None, loop=None,
*, _asyncio_internal=False):
super().__init__(loop=loop, _asyncio_internal=_asyncio_internal)
if stream_reader is not None:
self._stream_reader_wr = weakref.ref(stream_reader,
self._on_reader_gc)
@ -253,7 +277,8 @@ def connection_made(self, transport):
if self._client_connected_cb is not None:
self._stream_writer = StreamWriter(transport, self,
reader,
self._loop)
self._loop,
_asyncio_internal=True)
res = self._client_connected_cb(reader,
self._stream_writer)
if coroutines.iscoroutine(res):
@ -293,6 +318,9 @@ def eof_received(self):
return False
return True
def _get_close_waiter(self, stream):
return self._closed
def __del__(self):
# Prevent reports about unhandled exceptions.
# Better than self._closed._log_traceback = False hack
@ -311,7 +339,13 @@ class StreamWriter:
directly.
"""
def __init__(self, transport, protocol, reader, loop):
def __init__(self, transport, protocol, reader, loop,
*, _asyncio_internal=False):
if not _asyncio_internal:
warnings.warn(f"{self.__class__} should be instaniated "
"by asyncio internals only, "
"please avoid its creation from user code",
DeprecationWarning)
self._transport = transport
self._protocol = protocol
# drain() expects that the reader has an exception() method
@ -348,7 +382,7 @@ def is_closing(self):
return self._transport.is_closing()
async def wait_closed(self):
await self._protocol._closed
await self._protocol._get_close_waiter(self)
def get_extra_info(self, name, default=None):
return self._transport.get_extra_info(name, default)
@ -366,13 +400,12 @@ async def drain(self):
if exc is not None:
raise exc
if self._transport.is_closing():
# Yield to the event loop so connection_lost() may be
# called. Without this, _drain_helper() would return
# immediately, and code that calls
# write(...); await drain()
# in a loop would never call connection_lost(), so it
# would not see an error when the socket is closed.
await sleep(0, loop=self._loop)
# Wait for protocol.connection_lost() call
# Raise connection closing error if any,
# ConnectionResetError otherwise
fut = self._protocol._get_close_waiter(self)
await fut
raise ConnectionResetError('Connection lost')
await self._protocol._drain_helper()
async def aclose(self):
@ -388,7 +421,14 @@ class StreamReader:
_source_traceback = None
def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
def __init__(self, limit=_DEFAULT_LIMIT, loop=None,
*, _asyncio_internal=False):
if not _asyncio_internal:
warnings.warn(f"{self.__class__} should be instaniated "
"by asyncio internals only, "
"please avoid its creation from user code",
DeprecationWarning)
# The line length limit is a security feature;
# it also doubles as half the buffer limit.
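
The net effect of the _asyncio_internal flag: constructing the stream classes directly now emits a DeprecationWarning, steering users toward the factory helpers. A minimal sketch:

import asyncio
import warnings

async def main():
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        asyncio.StreamReader()   # direct construction now warns
    assert caught[0].category is DeprecationWarning
    # Preferred: let asyncio build the pair via open_connection() or
    # start_server() instead.

asyncio.run(main())
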


@ -1,6 +1,7 @@
__all__ = 'create_subprocess_exec', 'create_subprocess_shell'
import subprocess
import warnings
from . import events
from . import protocols
@ -18,13 +19,14 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
protocols.SubprocessProtocol):
"""Like StreamReaderProtocol, but for a subprocess."""
def __init__(self, limit, loop):
super().__init__(loop=loop)
def __init__(self, limit, loop, *, _asyncio_internal=False):
super().__init__(loop=loop, _asyncio_internal=_asyncio_internal)
self._limit = limit
self.stdin = self.stdout = self.stderr = None
self._transport = None
self._process_exited = False
self._pipe_fds = []
self._stdin_closed = self._loop.create_future()
def __repr__(self):
info = [self.__class__.__name__]
@ -42,14 +44,16 @@ def connection_made(self, transport):
stdout_transport = transport.get_pipe_transport(1)
if stdout_transport is not None:
self.stdout = streams.StreamReader(limit=self._limit,
loop=self._loop)
loop=self._loop,
_asyncio_internal=True)
self.stdout.set_transport(stdout_transport)
self._pipe_fds.append(1)
stderr_transport = transport.get_pipe_transport(2)
if stderr_transport is not None:
self.stderr = streams.StreamReader(limit=self._limit,
loop=self._loop)
loop=self._loop,
_asyncio_internal=True)
self.stderr.set_transport(stderr_transport)
self._pipe_fds.append(2)
@ -58,7 +62,8 @@ def connection_made(self, transport):
self.stdin = streams.StreamWriter(stdin_transport,
protocol=self,
reader=None,
loop=self._loop)
loop=self._loop,
_asyncio_internal=True)
def pipe_data_received(self, fd, data):
if fd == 1:
@ -76,6 +81,10 @@ def pipe_connection_lost(self, fd, exc):
if pipe is not None:
pipe.close()
self.connection_lost(exc)
if exc is None:
self._stdin_closed.set_result(None)
else:
self._stdin_closed.set_exception(exc)
return
if fd == 1:
reader = self.stdout
@ -102,9 +111,19 @@ def _maybe_close_transport(self):
self._transport.close()
self._transport = None
def _get_close_waiter(self, stream):
if stream is self.stdin:
return self._stdin_closed
class Process:
def __init__(self, transport, protocol, loop):
def __init__(self, transport, protocol, loop, *, _asyncio_internal=False):
if not _asyncio_internal:
warnings.warn(f"{self.__class__} should be instaniated "
"by asyncio internals only, "
"please avoid its creation from user code",
DeprecationWarning)
self._transport = transport
self._protocol = protocol
self._loop = loop
@ -195,12 +214,13 @@ async def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None,
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
loop=loop,
_asyncio_internal=True)
transport, protocol = await loop.subprocess_shell(
protocol_factory,
cmd, stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
return Process(transport, protocol, loop)
return Process(transport, protocol, loop, _asyncio_internal=True)
async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
@ -209,10 +229,11 @@ async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
loop=loop,
_asyncio_internal=True)
transport, protocol = await loop.subprocess_exec(
protocol_factory,
program, *args,
stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
return Process(transport, protocol, loop)
return Process(transport, protocol, loop, _asyncio_internal=True)


@ -495,10 +495,11 @@ def _on_completion(f):
finally:
if timeout_handle is not None:
timeout_handle.cancel()
for f in fs:
f.remove_done_callback(_on_completion)
done, pending = set(), set()
for f in fs:
f.remove_done_callback(_on_completion)
if f.done():
done.add(f)
else:
@ -627,7 +628,8 @@ def ensure_future(coro_or_future, *, loop=None):
return task
elif futures.isfuture(coro_or_future):
if loop is not None and loop is not futures._get_loop(coro_or_future):
raise ValueError('loop argument must agree with Future')
raise ValueError('The future belongs to a different loop than '
'the one specified as the loop argument')
return coro_or_future
elif inspect.isawaitable(coro_or_future):
return ensure_future(_wrap_awaitable(coro_or_future), loop=loop)
@ -816,7 +818,7 @@ def shield(arg, *, loop=None):
loop = futures._get_loop(inner)
outer = loop.create_future()
def _done_callback(inner):
def _inner_done_callback(inner):
if outer.cancelled():
if not inner.cancelled():
# Mark inner's result as retrieved.
@ -832,7 +834,13 @@ def _done_callback(inner):
else:
outer.set_result(inner.result())
inner.add_done_callback(_done_callback)
def _outer_done_callback(outer):
if not inner.done():
inner.remove_done_callback(_inner_done_callback)
inner.add_done_callback(_inner_done_callback)
outer.add_done_callback(_outer_done_callback)
return outer
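
The added _outer_done_callback detaches shield()'s callback from the inner future when the outer future finishes first, so the inner task no longer accumulates stale callbacks. The user-visible contract is unchanged, as this sketch shows:

import asyncio

async def inner():
    await asyncio.sleep(0.1)
    return "done"

async def main():
    task = asyncio.create_task(inner())
    outer = asyncio.shield(task)
    outer.cancel()                 # cancels the shield, not the task
    try:
        await outer
    except asyncio.CancelledError:
        pass
    assert not task.cancelled()    # the shielded task keeps running
    assert await task == "done"

asyncio.run(main())
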


@ -262,8 +262,6 @@ def __repr__(self):
status.append(repr(self.addr))
return '<%s at %#x>' % (' '.join(status), id(self))
__str__ = __repr__
def add_channel(self, map=None):
#self.log_info('adding channel %s' % self)
if map is None:


@ -618,11 +618,26 @@ def runctx(self, cmd, globals, locals):
# This method is more useful to debug a single function call.
def runcall(self, func, *args, **kwds):
def runcall(*args, **kwds):
"""Debug a single function call.
Return the result of the function call.
"""
if len(args) >= 2:
self, func, *args = args
elif not args:
raise TypeError("descriptor 'runcall' of 'Bdb' object "
"needs an argument")
elif 'func' in kwds:
func = kwds.pop('func')
self, *args = args
import warnings
warnings.warn("Passing 'func' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('runcall expected at least 1 positional argument, '
'got %d' % (len(args)-1))
self.reset()
sys.settrace(self.trace_dispatch)
res = None
@ -634,6 +649,7 @@ def runcall(self, func, *args, **kwds):
self.quitting = True
sys.settrace(None)
return res
runcall.__text_signature__ = '($self, func, /, *args, **kwds)'
def set_trace():
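
This *args unpacking plus __text_signature__ assignment, repeated below for cProfile, the executors, contextlib, curses and functools, emulates a positional-only func parameter before PEP 570 syntax existed: a caller's own func=... keyword no longer collides with the parameter name. A condensed sketch of the idiom:

import warnings

class Demo:
    def run(*args, **kwds):
        # Peel off 'self' and 'func' positionally so that a caller's own
        # func=... keyword is still accepted (with a warning) instead of
        # colliding with the parameter name.
        if len(args) >= 2:
            self, func, *args = args
        elif not args:
            raise TypeError("descriptor 'run' of 'Demo' object "
                            "needs an argument")
        elif 'func' in kwds:
            func = kwds.pop('func')
            self, *args = args
            warnings.warn("Passing 'func' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            raise TypeError('run expected at least 1 positional argument, '
                            'got %d' % (len(args)-1))
        return func(*args, **kwds)
    run.__text_signature__ = '($self, func, /, *args, **kwds)'

assert Demo().run(lambda x: x + 1, 41) == 42
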


@ -9,14 +9,7 @@ def insort_right(a, x, lo=0, hi=None):
slice of a to be searched.
"""
if lo < 0:
raise ValueError('lo must be non-negative')
if hi is None:
hi = len(a)
while lo < hi:
mid = (lo+hi)//2
if x < a[mid]: hi = mid
else: lo = mid+1
lo = bisect_right(a, x, lo, hi)
a.insert(lo, x)
def bisect_right(a, x, lo=0, hi=None):
@ -49,14 +42,7 @@ def insort_left(a, x, lo=0, hi=None):
slice of a to be searched.
"""
if lo < 0:
raise ValueError('lo must be non-negative')
if hi is None:
hi = len(a)
while lo < hi:
mid = (lo+hi)//2
if a[mid] < x: lo = mid+1
else: hi = mid
lo = bisect_left(a, x, lo, hi)
a.insert(lo, x)
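
Both insort variants now delegate the binary search to the corresponding bisect function instead of duplicating it; behavior is unchanged:

import bisect

data = [1, 4, 4, 9]
bisect.insort_right(data, 4)   # inserts after existing equal keys
bisect.insort_left(data, 4)    # inserts before existing equal keys
assert data == [1, 4, 4, 4, 4, 9]
assert bisect.bisect_left(data, 4) == 1
assert bisect.bisect_right(data, 4) == 5
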


@ -103,12 +103,28 @@ def runctx(self, cmd, globals, locals):
return self
# This method is more useful to profile a single function call.
def runcall(self, func, *args, **kw):
def runcall(*args, **kw):
if len(args) >= 2:
self, func, *args = args
elif not args:
raise TypeError("descriptor 'runcall' of 'Profile' object "
"needs an argument")
elif 'func' in kw:
func = kw.pop('func')
self, *args = args
import warnings
warnings.warn("Passing 'func' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('runcall expected at least 1 positional argument, '
'got %d' % (len(args)-1))
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
runcall.__text_signature__ = '($self, func, /, *args, **kw)'
def __enter__(self):
self.enable()


@ -1016,8 +1016,10 @@ def __init__(*args, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
if kwargs:
self.update(kwargs)
__init__.__text_signature__ = '($self, dict=None, /, **kwargs)'
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
@ -1083,7 +1085,11 @@ def __cast(self, other):
return other.data if isinstance(other, UserList) else other
def __contains__(self, item): return item in self.data
def __len__(self): return len(self.data)
def __getitem__(self, i): return self.data[i]
def __getitem__(self, i):
if isinstance(i, slice):
return self.__class__(self.data[i])
else:
return self.data[i]
def __setitem__(self, i, item): self.data[i] = item
def __delitem__(self, i): del self.data[i]
def __add__(self, other):
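
With the new slice branch, slicing a UserList (or a subclass) returns an instance of the same class rather than a bare list:

from collections import UserList

class Stack(UserList):
    def top(self):
        return self.data[-1]

s = Stack([1, 2, 3, 4])
head = s[:2]                 # now a Stack, not a plain list
assert isinstance(head, Stack)
assert head.top() == 2
assert s[0] == 1             # scalar indexing is unchanged
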


@ -544,7 +544,7 @@ def set_exception(self, exception):
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
def submit(self, fn, *args, **kwargs):
def submit(*args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
@ -553,7 +553,21 @@ def submit(self, fn, *args, **kwargs):
Returns:
A Future representing the given call.
"""
if len(args) >= 2:
pass
elif not args:
raise TypeError("descriptor 'submit' of 'Executor' object "
"needs an argument")
elif 'fn' in kwargs:
import warnings
warnings.warn("Passing 'fn' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('submit expected at least 1 positional argument, '
'got %d' % (len(args)-1))
raise NotImplementedError()
submit.__text_signature__ = '($self, fn, /, *args, **kwargs)'
def map(self, fn, *iterables, timeout=None, chunksize=1):
"""Returns an iterator equivalent to map(fn, iter).


@ -3,7 +3,7 @@
"""Implements ProcessPoolExecutor.
The follow diagram and text describe the data-flow through the system:
The following diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
@ -51,7 +51,7 @@
import queue
from queue import Full
import multiprocessing as mp
from multiprocessing.connection import wait
import multiprocessing.connection
from multiprocessing.queues import Queue
import threading
import weakref
@ -352,7 +352,7 @@ def shutdown_worker():
# submitted, from the executor being shutdown/gc-ed, or from the
# shutdown of the python interpreter.
worker_sentinels = [p.sentinel for p in processes.values()]
ready = wait(readers + worker_sentinels)
ready = mp.connection.wait(readers + worker_sentinels)
cause = None
is_broken = True
@ -594,7 +594,22 @@ def _adjust_process_count(self):
p.start()
self._processes[p.pid] = p
def submit(self, fn, *args, **kwargs):
def submit(*args, **kwargs):
if len(args) >= 2:
self, fn, *args = args
elif not args:
raise TypeError("descriptor 'submit' of 'ProcessPoolExecutor' object "
"needs an argument")
elif 'fn' in kwargs:
fn = kwargs.pop('fn')
self, *args = args
import warnings
warnings.warn("Passing 'fn' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('submit expected at least 1 positional argument, '
'got %d' % (len(args)-1))
with self._shutdown_lock:
if self._broken:
raise BrokenProcessPool(self._broken)
@ -615,6 +630,7 @@ def submit(self, fn, *args, **kwargs):
self._start_queue_management_thread()
return f
submit.__text_signature__ = _base.Executor.submit.__text_signature__
submit.__doc__ = _base.Executor.submit.__doc__
def map(self, fn, *iterables, timeout=None, chunksize=1):


@ -142,7 +142,22 @@ def __init__(self, max_workers=None, thread_name_prefix='',
self._initializer = initializer
self._initargs = initargs
def submit(self, fn, *args, **kwargs):
def submit(*args, **kwargs):
if len(args) >= 2:
self, fn, *args = args
elif not args:
raise TypeError("descriptor 'submit' of 'ThreadPoolExecutor' object "
"needs an argument")
elif 'fn' in kwargs:
fn = kwargs.pop('fn')
self, *args = args
import warnings
warnings.warn("Passing 'fn' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('submit expected at least 1 positional argument, '
'got %d' % (len(args)-1))
with self._shutdown_lock:
if self._broken:
raise BrokenThreadPool(self._broken)
@ -159,6 +174,7 @@ def submit(self, fn, *args, **kwargs):
self._work_queue.put(w)
self._adjust_thread_count()
return f
submit.__text_signature__ = _base.Executor.submit.__text_signature__
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):


@ -377,7 +377,8 @@ def _create_exit_wrapper(cm, cm_exit):
return MethodType(cm_exit, cm)
@staticmethod
def _create_cb_wrapper(callback, *args, **kwds):
def _create_cb_wrapper(*args, **kwds):
callback, *args = args
def _exit_wrapper(exc_type, exc, tb):
callback(*args, **kwds)
return _exit_wrapper
@ -426,11 +427,26 @@ def enter_context(self, cm):
self._push_cm_exit(cm, _exit)
return result
def callback(self, callback, *args, **kwds):
def callback(*args, **kwds):
"""Registers an arbitrary callback and arguments.
Cannot suppress exceptions.
"""
if len(args) >= 2:
self, callback, *args = args
elif not args:
raise TypeError("descriptor 'callback' of '_BaseExitStack' object "
"needs an argument")
elif 'callback' in kwds:
callback = kwds.pop('callback')
self, *args = args
import warnings
warnings.warn("Passing 'callback' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('callback expected at least 1 positional argument, '
'got %d' % (len(args)-1))
_exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds)
# We changed the signature, so using @wraps is not appropriate, but
@ -438,6 +454,7 @@ def callback(self, callback, *args, **kwds):
_exit_wrapper.__wrapped__ = callback
self._push_exit_callback(_exit_wrapper)
return callback # Allow use as a decorator
callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
def _push_cm_exit(self, cm, cm_exit):
"""Helper to correctly register callbacks to __exit__ methods."""
@ -536,7 +553,8 @@ def _create_async_exit_wrapper(cm, cm_exit):
return MethodType(cm_exit, cm)
@staticmethod
def _create_async_cb_wrapper(callback, *args, **kwds):
def _create_async_cb_wrapper(*args, **kwds):
callback, *args = args
async def _exit_wrapper(exc_type, exc, tb):
await callback(*args, **kwds)
return _exit_wrapper
@ -571,11 +589,26 @@ def push_async_exit(self, exit):
self._push_async_cm_exit(exit, exit_method)
return exit # Allow use as a decorator
def push_async_callback(self, callback, *args, **kwds):
def push_async_callback(*args, **kwds):
"""Registers an arbitrary coroutine function and arguments.
Cannot suppress exceptions.
"""
if len(args) >= 2:
self, callback, *args = args
elif not args:
raise TypeError("descriptor 'push_async_callback' of "
"'AsyncExitStack' object needs an argument")
elif 'callback' in kwds:
callback = kwds.pop('callback')
self, *args = args
import warnings
warnings.warn("Passing 'callback' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('push_async_callback expected at least 1 '
'positional argument, got %d' % (len(args)-1))
_exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds)
# We changed the signature, so using @wraps is not appropriate, but
@ -583,6 +616,7 @@ def push_async_callback(self, callback, *args, **kwds):
_exit_wrapper.__wrapped__ = callback
self._push_exit_callback(_exit_wrapper, False)
return callback # Allow use as a decorator
push_async_callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
async def aclose(self):
"""Immediately unwind the context stack."""


@ -326,7 +326,8 @@ class CDLL(object):
def __init__(self, name, mode=DEFAULT_MODE, handle=None,
use_errno=False,
use_last_error=False):
use_last_error=False,
winmode=None):
self._name = name
flags = self._func_flags_
if use_errno:
@ -341,6 +342,15 @@ def __init__(self, name, mode=DEFAULT_MODE, handle=None,
"""
if name and name.endswith(")") and ".a(" in name:
mode |= ( _os.RTLD_MEMBER | _os.RTLD_NOW )
if _os.name == "nt":
if winmode is not None:
mode = winmode
else:
import nt
mode = nt._LOAD_LIBRARY_SEARCH_DEFAULT_DIRS
if '/' in name or '\\' in name:
self._name = nt._getfullpathname(self._name)
mode |= nt._LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR
class _FuncPtr(_CFuncPtr):
_flags_ = flags
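
On Windows the default is now the hardened LOAD_LIBRARY_SEARCH_DEFAULT_DIRS search, with winmode as an escape hatch. A sketch of the resulting API (DLL names and paths are illustrative):

import os
from ctypes import WinDLL   # Windows-only

dll = WinDLL("sqlite3.dll")                # secure default search
legacy = WinDLL("sqlite3.dll", winmode=0)  # legacy, permissive search

# Opt-in extra search directory, mirroring the test below:
p = os.add_dll_directory(r"C:\extra\dlls")
dep = WinDLL("mylib.dll")
p.close()
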


@ -197,6 +197,12 @@ class T(Array):
_type_ = c_int
_length_ = 0
def test_bpo36504_signed_int_overflow(self):
# The overflow check in PyCArrayType_new() could cause signed integer
# overflow.
with self.assertRaises(OverflowError):
c_char * sys.maxsize * 2
@unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
@bigmemtest(size=_2G, memuse=1, dry_run=False)
def test_large_array(self, size):


@ -1,6 +1,9 @@
from ctypes import *
import os
import shutil
import subprocess
import sys
import sysconfig
import unittest
import test.support
from ctypes.util import find_library
@ -112,5 +115,66 @@ def test_1703286_B(self):
# This is the real test: call the function via 'call_function'
self.assertEqual(0, call_function(proc, (None,)))
@unittest.skipUnless(os.name == "nt",
'test specific to Windows')
def test_load_dll_with_flags(self):
_sqlite3 = test.support.import_module("_sqlite3")
src = _sqlite3.__file__
if src.lower().endswith("_d.pyd"):
ext = "_d.dll"
else:
ext = ".dll"
with test.support.temp_dir() as tmp:
# We copy two files and load _sqlite3.dll (formerly .pyd),
# which has a dependency on sqlite3.dll. Then we test
# loading it in subprocesses to avoid it starting in memory
# for each test.
target = os.path.join(tmp, "_sqlite3.dll")
shutil.copy(src, target)
shutil.copy(os.path.join(os.path.dirname(src), "sqlite3" + ext),
os.path.join(tmp, "sqlite3" + ext))
def should_pass(command):
with self.subTest(command):
subprocess.check_output(
[sys.executable, "-c",
"from ctypes import *; import nt;" + command],
cwd=tmp
)
def should_fail(command):
with self.subTest(command):
with self.assertRaises(subprocess.CalledProcessError):
subprocess.check_output(
[sys.executable, "-c",
"from ctypes import *; import nt;" + command],
cwd=tmp, stderr=subprocess.STDOUT,
)
# Default load should not find this in CWD
should_fail("WinDLL('_sqlite3.dll')")
# Relative path (but not just filename) should succeed
should_pass("WinDLL('./_sqlite3.dll')")
# Insecure load flags should succeed
should_pass("WinDLL('_sqlite3.dll', winmode=0)")
# Full path load without DLL_LOAD_DIR shouldn't find dependency
should_fail("WinDLL(nt._getfullpathname('_sqlite3.dll'), " +
"winmode=nt._LOAD_LIBRARY_SEARCH_SYSTEM32)")
# Full path load with DLL_LOAD_DIR should succeed
should_pass("WinDLL(nt._getfullpathname('_sqlite3.dll'), " +
"winmode=nt._LOAD_LIBRARY_SEARCH_SYSTEM32|" +
"nt._LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR)")
# User-specified directory should succeed
should_pass("import os; p = os.add_dll_directory(os.getcwd());" +
"WinDLL('_sqlite3.dll'); p.close()")
if __name__ == "__main__":
unittest.main()


@ -80,9 +80,9 @@ class struct_frozen(Structure):
continue
items.append((entry.name.decode("ascii"), entry.size))
expected = [("__hello__", 139),
("__phello__", -139),
("__phello__.spam", 139),
expected = [("__hello__", 141),
("__phello__", -141),
("__phello__.spam", 141),
]
self.assertEqual(items, expected, "PyImport_FrozenModules example "
"in Doc/library/ctypes.rst may be out of date")


@ -6,35 +6,6 @@
import _ctypes_test
# Only windows 32-bit has different calling conventions.
@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test')
@unittest.skipUnless(sizeof(c_void_p) == sizeof(c_int),
"sizeof c_void_p and c_int differ")
class WindowsTestCase(unittest.TestCase):
def test_callconv_1(self):
# Testing stdcall function
IsWindow = windll.user32.IsWindow
# ValueError: Procedure probably called with not enough arguments
# (4 bytes missing)
self.assertRaises(ValueError, IsWindow)
# This one should succeed...
self.assertEqual(0, IsWindow(0))
# ValueError: Procedure probably called with too many arguments
# (8 bytes in excess)
self.assertRaises(ValueError, IsWindow, 0, 0, 0)
def test_callconv_2(self):
# Calling stdcall function as cdecl
IsWindow = cdll.user32.IsWindow
# ValueError: Procedure called with not enough arguments
# (4 bytes missing) or wrong calling convention
self.assertRaises(ValueError, IsWindow, None)
@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test')
class FunctionCallTestCase(unittest.TestCase):
@unittest.skipUnless('MSC' in sys.version, "SEH only supported by MSC")


@ -60,7 +60,7 @@ def start_color():
# raises an exception, wrapper() will restore the terminal to a sane state so
# you can read the resulting traceback.
def wrapper(func, *args, **kwds):
def wrapper(*args, **kwds):
"""Wrapper function that initializes curses and calls another function,
restoring normal keyboard/screen behavior on error.
The callable object 'func' is then passed the main window 'stdscr'
@ -68,6 +68,17 @@ def wrapper(func, *args, **kwds):
wrapper().
"""
if args:
func, *args = args
elif 'func' in kwds:
func = kwds.pop('func')
import warnings
warnings.warn("Passing 'func' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('wrapper expected at least 1 positional argument, '
'got %d' % len(args))
try:
# Initialize curses
stdscr = initscr()
@ -99,3 +110,4 @@ def wrapper(func, *args, **kwds):
echo()
nocbreak()
endwin()
wrapper.__text_signature__ = '(func, /, *args, **kwds)'


@ -884,6 +884,40 @@ def fromisoformat(cls, date_string):
except Exception:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
@classmethod
def fromisocalendar(cls, year, week, day):
"""Construct a date from the ISO year, week number and weekday.
This is the inverse of the date.isocalendar() function"""
# Year is bounded this way because 9999-12-31 is (9999, 52, 5)
if not MINYEAR <= year <= MAXYEAR:
raise ValueError(f"Year is out of range: {year}")
if not 0 < week < 53:
out_of_range = True
if week == 53:
# ISO years have 53 weeks in them on years starting with a
# Thursday and leap years starting on a Wednesday
first_weekday = _ymd2ord(year, 1, 1) % 7
if (first_weekday == 4 or (first_weekday == 3 and
_is_leap(year))):
out_of_range = False
if out_of_range:
raise ValueError(f"Invalid week: {week}")
if not 0 < day < 8:
raise ValueError(f"Invalid weekday: {day} (range is [1, 7])")
# Now compute the offset from (Y, 1, 1) in days:
day_offset = (week - 1) * 7 + (day - 1)
# Calculate the ordinal day for monday, week 1
day_1 = _isoweek1monday(year)
ord_day = day_1 + day_offset
return cls(*_ord2ymd(ord_day))
# Conversions to string
@ -2141,6 +2175,7 @@ def _isoweek1monday(year):
week1monday += 7
return week1monday
class timezone(tzinfo):
__slots__ = '_offset', '_name'
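
A round-trip sketch for the new constructor:

from datetime import date

d = date.fromisocalendar(2004, 1, 1)      # ISO year 2004, week 1, Monday
assert d == date(2003, 12, 29)            # ISO week 1 can start in December
assert d.isocalendar() == (2004, 1, 1)    # round-trips

# Week 53 only exists for "long" ISO years; otherwise ValueError is raised.
assert date.fromisocalendar(2015, 53, 7) == date(2016, 1, 3)
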


@ -82,7 +82,8 @@ def open(file, flag='r', mode=0o666):
# file doesn't exist and the new flag was used so use default type
mod = _defaultmod
else:
raise error[0]("need 'c' or 'n' flag to open new db")
raise error[0]("db file doesn't exist; "
"use 'c' or 'n' flag to create a new db")
elif result == "":
# db type cannot be determined
raise error[0]("db type could not be determined")

View file

@ -157,6 +157,7 @@ def _format_code_info(co):
lines.append("Name: %s" % co.co_name)
lines.append("Filename: %s" % co.co_filename)
lines.append("Argument count: %s" % co.co_argcount)
lines.append("Positional-only arguments: %s" % co.co_posonlyargcount)
lines.append("Kw-only arguments: %s" % co.co_kwonlyargcount)
lines.append("Number of locals: %s" % co.co_nlocals)
lines.append("Stack size: %s" % co.co_stacksize)


@ -89,13 +89,24 @@ def _find_vc2017():
return None, None
PLAT_SPEC_TO_RUNTIME = {
'x86' : 'x86',
'x86_amd64' : 'x64',
'x86_arm' : 'arm',
}
def _find_vcvarsall(plat_spec):
_, best_dir = _find_vc2017()
vcruntime = None
vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
if plat_spec in PLAT_SPEC_TO_RUNTIME:
vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
else:
vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
if best_dir:
vcredist = os.path.join(best_dir, "..", "..", "redist", "MSVC", "**",
"Microsoft.VC141.CRT", "vcruntime140.dll")
vcruntime_plat, "Microsoft.VC141.CRT", "vcruntime140.dll")
try:
import glob
vcruntime = glob.glob(vcredist, recursive=True)[-1]
@ -178,6 +189,7 @@ def _find_exe(exe, paths=None):
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'x86_amd64',
'win-arm32' : 'x86_arm',
}
# A set containing the DLLs that are guaranteed to be available for


@ -537,7 +537,8 @@ def _make_spec_file(self):
'',
'%' + rpm_opt,])
if val:
spec_file.extend(open(val, 'r').read().split('\n'))
with open(val) as f:
spec_file.extend(f.read().split('\n'))
else:
spec_file.append(default)


@ -247,47 +247,49 @@ def create_exe(self, arcname, fullname, bitmap=None):
self.announce("creating %s" % installer_name)
if bitmap:
bitmapdata = open(bitmap, "rb").read()
with open(bitmap, "rb") as f:
bitmapdata = f.read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
file = open(installer_name, "wb")
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
with open(installer_name, "wb") as file:
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
# Convert cfgdata from unicode to ascii, mbcs encoded
if isinstance(cfgdata, str):
cfgdata = cfgdata.encode("mbcs")
# Convert cfgdata from unicode to ascii, mbcs encoded
if isinstance(cfgdata, str):
cfgdata = cfgdata.encode("mbcs")
# Append the pre-install script
cfgdata = cfgdata + b"\0"
if self.pre_install_script:
# We need to normalize newlines, so we open in text mode and
# convert back to bytes. "latin-1" simply avoids any possible
# failures.
with open(self.pre_install_script, "r",
encoding="latin-1") as script:
script_data = script.read().encode("latin-1")
cfgdata = cfgdata + script_data + b"\n\0"
else:
# empty pre-install script
# Append the pre-install script
cfgdata = cfgdata + b"\0"
file.write(cfgdata)
if self.pre_install_script:
# We need to normalize newlines, so we open in text mode and
# convert back to bytes. "latin-1" simply avoids any possible
# failures.
with open(self.pre_install_script, "r",
encoding="latin-1") as script:
script_data = script.read().encode("latin-1")
cfgdata = cfgdata + script_data + b"\n\0"
else:
# empty pre-install script
cfgdata = cfgdata + b"\0"
file.write(cfgdata)
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
file.write(open(arcname, "rb").read())
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
with open(arcname, "rb") as f:
file.write(f.read())
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses


@ -116,7 +116,7 @@ def finalize_options(self):
self.build_scripts = os.path.join(self.build_base,
'scripts-%d.%d' % sys.version_info[:2])
if self.executable is None:
if self.executable is None and sys.executable:
self.executable = os.path.normpath(sys.executable)
if isinstance(self.parallel, str):


@ -714,20 +714,20 @@ def get_libraries(self, ext):
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
else:
return ext.libraries
elif sys.platform == 'darwin':
# Don't use the default code below
return ext.libraries
elif sys.platform[:3] == 'aix':
# Don't use the default code below
return ext.libraries
# On Android only the main executable and LD_PRELOADs are considered
# to be RTLD_GLOBAL, all the dependencies of the main executable
# remain RTLD_LOCAL and so the shared libraries must be linked with
# libpython when python is built with a shared python library (issue
# bpo-21536).
else:
from distutils import sysconfig
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
pythonlib = 'python{}.{}{}'.format(
sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff,
sysconfig.get_config_var('ABIFLAGS'))
return ext.libraries + [pythonlib]
else:
return ext.libraries
from distutils.sysconfig import get_config_var
if get_config_var('Py_ENABLE_SHARED'):
# Either a native build on an Android device or the
# cross-compilation of Python.
if (hasattr(sys, 'getandroidapilevel') or
('_PYTHON_HOST_PLATFORM' in os.environ and
get_config_var('ANDROID_API_LEVEL') != 0)):
ldversion = get_config_var('LDVERSION')
return ext.libraries + ['python' + ldversion]
return ext.libraries


@ -120,7 +120,8 @@ def check_restructuredtext(self):
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
source_path = StringIO()
# the include and csv_table directives need this to be a path
source_path = self.distribution.script_name or 'setup.py'
parser = Parser()
settings = frontend.OptionParser(components=(Parser,)).get_default_values()
settings.tab_width = 4


@ -125,8 +125,9 @@ def upload_file(self, command, pyversion, filename):
data['comment'] = ''
if self.sign:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
open(filename+".asc", "rb").read())
with open(filename + ".asc", "rb") as f:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
f.read())
# set up the authentication
user_pass = (self.username + ":" + self.password).encode('ascii')


@ -81,7 +81,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
"command %r failed with exit status %d" % (cmd, rc))
if sys.platform == 'darwin':
from distutils import sysconfig
_cfg_target = None
_cfg_target_split = None
@ -95,6 +94,7 @@ def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
if sys.platform == 'darwin':
global _cfg_target, _cfg_target_split
if _cfg_target is None:
from distutils import sysconfig
_cfg_target = sysconfig.get_config_var(
'MACOSX_DEPLOYMENT_TARGET') or ''
if _cfg_target:
@ -172,21 +172,32 @@ def find_executable(executable, path=None):
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ.get('PATH', os.defpath)
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
_, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
if os.path.isfile(executable):
return executable
if path is None:
path = os.environ.get('PATH', None)
if path is None:
try:
path = os.confstr("CS_PATH")
except (AttributeError, ValueError):
# os.confstr() or CS_PATH is not available
path = os.defpath
# bpo-35755: Don't use os.defpath if the PATH environment variable is
# set to an empty string
# PATH='' doesn't match, whereas PATH=':' looks in the current directory
if not path:
return None
paths = path.split(os.pathsep)
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
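
Behavior sketch for the reworked lookup; results naturally depend on the host system:

from distutils.spawn import find_executable

# Absolute or relative paths short-circuit the PATH search entirely.
print(find_executable("/bin/sh"))   # '/bin/sh' on most Unix systems

# Bare names are resolved against PATH; an empty PATH now means "no match"
# rather than silently falling back to os.defpath.
print(find_executable("python3"))   # e.g. '/usr/bin/python3', or None
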


@ -15,6 +15,7 @@
import sys
from .errors import DistutilsPlatformError
from .util import get_platform, get_host_platform
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
@ -28,7 +29,12 @@
if "_PYTHON_PROJECT_BASE" in os.environ:
project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
project_base = os.path.dirname(os.path.abspath(sys.executable))
if sys.executable:
project_base = os.path.dirname(os.path.abspath(sys.executable))
else:
# sys.executable can be empty if argv[0] has been changed and Python is
# unable to retrieve the real program name
project_base = os.getcwd()
# python_build: (Boolean) if true, we're either building Python or


@ -0,0 +1 @@
This should be included.


@ -1,4 +1,5 @@
"""Tests for distutils.command.check."""
import os
import textwrap
import unittest
from test.support import run_unittest
@ -13,13 +14,19 @@
pygments = None
HERE = os.path.dirname(__file__)
class CheckTestCase(support.LoggingSilencer,
support.TempdirManager,
unittest.TestCase):
def _run(self, metadata=None, **options):
def _run(self, metadata=None, cwd=None, **options):
if metadata is None:
metadata = {}
if cwd is not None:
old_dir = os.getcwd()
os.chdir(cwd)
pkg_info, dist = self.create_dist(**metadata)
cmd = check(dist)
cmd.initialize_options()
@ -27,6 +34,8 @@ def _run(self, metadata=None, **options):
setattr(cmd, name, value)
cmd.ensure_finalized()
cmd.run()
if cwd is not None:
os.chdir(old_dir)
return cmd
def test_check_metadata(self):
@ -99,6 +108,11 @@ def test_check_restructuredtext(self):
cmd = self._run(metadata, strict=1, restructuredtext=1)
self.assertEqual(cmd._warnings, 0)
# check that includes work to test #31292
metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1)
self.assertEqual(cmd._warnings, 0)
@unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils")
def test_check_restructuredtext_with_syntax_highlight(self):
# Don't fail if there is a `code` or `code-block` directive


@ -87,11 +87,52 @@ def test_find_executable(self):
rv = find_executable(dont_exist_program , path=tmp_dir)
self.assertIsNone(rv)
# test os.defpath: missing PATH environment variable
# PATH='': no match, except in the current directory
with test_support.EnvironmentVarGuard() as env:
with mock.patch('distutils.spawn.os.defpath', tmp_dir):
env.pop('PATH')
env['PATH'] = ''
with unittest.mock.patch('distutils.spawn.os.confstr',
return_value=tmp_dir, create=True), \
unittest.mock.patch('distutils.spawn.os.defpath',
tmp_dir):
rv = find_executable(program)
self.assertIsNone(rv)
# look in current directory
with test_support.change_cwd(tmp_dir):
rv = find_executable(program)
self.assertEqual(rv, program)
# PATH=':': explicitly looks in the current directory
with test_support.EnvironmentVarGuard() as env:
env['PATH'] = os.pathsep
with unittest.mock.patch('distutils.spawn.os.confstr',
return_value='', create=True), \
unittest.mock.patch('distutils.spawn.os.defpath', ''):
rv = find_executable(program)
self.assertIsNone(rv)
# look in current directory
with test_support.change_cwd(tmp_dir):
rv = find_executable(program)
self.assertEqual(rv, program)
# missing PATH: test os.confstr("CS_PATH") and os.defpath
with test_support.EnvironmentVarGuard() as env:
env.pop('PATH', None)
# without confstr
with unittest.mock.patch('distutils.spawn.os.confstr',
side_effect=ValueError,
create=True), \
unittest.mock.patch('distutils.spawn.os.defpath',
tmp_dir):
rv = find_executable(program)
self.assertEqual(rv, filename)
# with confstr
with unittest.mock.patch('distutils.spawn.os.confstr',
return_value=tmp_dir, create=True), \
unittest.mock.patch('distutils.spawn.os.defpath', ''):
rv = find_executable(program)
self.assertEqual(rv, filename)


@ -92,6 +92,9 @@ def set_executables(self, **kw):
'CCSHARED': '--sc-ccshared',
'LDSHARED': 'sc_ldshared',
'SHLIB_SUFFIX': 'sc_shutil_suffix',
# On macOS, disable _osx_support.customize_compiler()
'CUSTOMIZED_OSX_COMPILER': 'True',
}
comp = compiler()


@ -15,7 +15,7 @@
from distutils import log
from distutils.errors import DistutilsByteCompileError
def get_platform ():
def get_host_platform():
"""Return a string that identifies the current platform. This is used mainly to
distinguish platform-specific build directories and platform-specific built
distributions. Typically includes the OS name and version and the
@ -38,6 +38,8 @@ def get_platform ():
if os.name == 'nt':
if 'amd64' in sys.version.lower():
return 'win-amd64'
if '(arm)' in sys.version.lower():
return 'win-arm32'
return sys.platform
# Set for cross builds explicitly
@ -90,8 +92,16 @@ def get_platform ():
return "%s-%s-%s" % (osname, release, machine)
# get_platform ()
def get_platform():
if os.name == 'nt':
TARGET_TO_PLAT = {
'x86' : 'win32',
'x64' : 'win-amd64',
'arm' : 'win-arm32',
}
return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
else:
return get_host_platform()
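
The split lets a Visual Studio cross-build prompt override the reported platform while get_host_platform() keeps reporting the interpreter's own. A sketch:

import os
from distutils.util import get_platform, get_host_platform

print(get_host_platform())     # e.g. 'win-amd64' or 'linux-x86_64'

# Inside a VS cross-build prompt targeting ARM (Windows only):
os.environ["VSCMD_ARG_TGT_ARCH"] = "arm"
print(get_platform())          # 'win-arm32' on Windows, host platform elsewhere
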
def convert_path (pathname):
"""Return 'pathname' as a name that will work on the native filesystem,


@ -2300,7 +2300,7 @@ def __repr__(self):
name = self._dt_test.name.split('.')
return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
__str__ = __repr__
__str__ = object.__str__
def shortDescription(self):
return "Doctest: " + self._dt_test.name
@ -2399,7 +2399,6 @@ def id(self):
def __repr__(self):
return self._dt_test.filename
__str__ = __repr__
def format_failure(self, err):
return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'

View file

@ -241,11 +241,9 @@ def __init__(self, input_charset=DEFAULT_CHARSET):
self.output_codec = CODEC_MAP.get(self.output_charset,
self.output_charset)
def __str__(self):
def __repr__(self):
return self.input_charset.lower()
__repr__ = __str__
def __eq__(self, other):
return str(self) == str(other).lower()

View file

@ -222,6 +222,7 @@ def __init__(self, files=None, inplace=False, backup="", bufsize=0,
warnings.warn("'U' mode is deprecated",
DeprecationWarning, 2)
self._mode = mode
self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
if openhook:
if inplace:
raise ValueError("FileInput cannot use an opening hook in inplace mode")
@ -348,14 +349,14 @@ def _readline(self):
try:
perm = os.fstat(self._file.fileno()).st_mode
except OSError:
self._output = open(self._filename, "w")
self._output = open(self._filename, self._write_mode)
else:
mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
if hasattr(os, 'O_BINARY'):
mode |= os.O_BINARY
fd = os.open(self._filename, mode, perm)
self._output = os.fdopen(fd, "w")
self._output = os.fdopen(fd, self._write_mode)
try:
os.chmod(self._filename, perm)
except OSError:

View file

@ -512,16 +512,16 @@ def __trunc__(a):
return a._numerator // a._denominator
def __floor__(a):
"""Will be math.floor(a) in 3.0."""
"""math.floor(a)"""
return a.numerator // a.denominator
def __ceil__(a):
"""Will be math.ceil(a) in 3.0."""
"""math.ceil(a)"""
# The negations cleverly convince floordiv to return the ceiling.
return -(-a.numerator // a.denominator)
def __round__(self, ndigits=None):
"""Will be round(self, ndigits) in 3.0.
"""round(self, ndigits)
Rounds half toward even.
"""

View file

@ -302,26 +302,7 @@ def sendeprt(self, host, port):
def makeport(self):
'''Create a new socket and send a PORT command for it.'''
err = None
sock = None
for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
af, socktype, proto, canonname, sa = res
try:
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
except OSError as _:
err = _
if sock:
sock.close()
sock = None
continue
break
if sock is None:
if err is not None:
raise err
else:
raise OSError("getaddrinfo returns an empty list")
sock.listen(1)
sock = socket.create_server(("", 0), family=self.af, backlog=1)
port = sock.getsockname()[1] # Get proper port
host = self.sock.getsockname()[0] # Get proper host
if self.af == socket.AF_INET:
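socket.create_server() is new in 3.8 and folds the whole getaddrinfo/socket/bind/listen loop into one call. A sketch of the replacement pattern in isolation, assuming a plain IPv4 socket:

    import socket

    # One call binds a listening socket on an ephemeral port; the old code
    # needed manual getaddrinfo() iteration and error bookkeeping.
    sock = socket.create_server(("", 0), family=socket.AF_INET, backlog=1)
    port = sock.getsockname()[1]   # kernel-assigned port, as makeport() reads it
    sock.close()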

View file

@ -285,10 +285,7 @@ def __new__(*args, **keywords):
if hasattr(func, "func"):
args = func.args + args
tmpkw = func.keywords.copy()
tmpkw.update(keywords)
keywords = tmpkw
del tmpkw
keywords = {**func.keywords, **keywords}
func = func.func
self = super(partial, cls).__new__(cls)
@ -302,9 +299,8 @@ def __call__(*args, **keywords):
if not args:
raise TypeError("descriptor '__call__' of partial needs an argument")
self, *args = args
newkeywords = self.keywords.copy()
newkeywords.update(keywords)
return self.func(*self.args, *args, **newkeywords)
keywords = {**self.keywords, **keywords}
return self.func(*self.args, *args, **keywords)
@recursive_repr()
def __repr__(self):
@ -358,7 +354,23 @@ class partialmethod(object):
callables as instance methods.
"""
def __init__(self, func, *args, **keywords):
def __init__(*args, **keywords):
if len(args) >= 2:
self, func, *args = args
elif not args:
raise TypeError("descriptor '__init__' of partialmethod "
"needs an argument")
elif 'func' in keywords:
func = keywords.pop('func')
self, *args = args
import warnings
warnings.warn("Passing 'func' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError("type 'partialmethod' takes at least one argument, "
"got %d" % (len(args)-1))
args = tuple(args)
if not callable(func) and not hasattr(func, "__get__"):
raise TypeError("{!r} is not callable or a descriptor"
.format(func))
@ -371,12 +383,12 @@ def __init__(self, func, *args, **keywords):
# it's also more efficient since only one function will be called
self.func = func.func
self.args = func.args + args
self.keywords = func.keywords.copy()
self.keywords.update(keywords)
self.keywords = {**func.keywords, **keywords}
else:
self.func = func
self.args = args
self.keywords = keywords
__init__.__text_signature__ = '($self, func, /, *args, **keywords)'
def __repr__(self):
args = ", ".join(map(repr, self.args))
@ -391,11 +403,9 @@ def __repr__(self):
def _make_unbound_method(self):
def _method(*args, **keywords):
call_keywords = self.keywords.copy()
call_keywords.update(keywords)
cls_or_self, *rest = args
call_args = (cls_or_self,) + self.args + tuple(rest)
return self.func(*call_args, **call_keywords)
cls_or_self, *args = args
keywords = {**self.keywords, **keywords}
return self.func(cls_or_self, *self.args, *args, **keywords)
_method.__isabstractmethod__ = self.__isabstractmethod__
_method._partialmethod = self
return _method
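Throughout this file the copy()-then-update() dance is replaced by a single dict display; later mappings win on duplicate keys, which preserves the old override semantics. A minimal demonstration:

    base = {'sep': '-', 'end': '\n'}
    override = {'end': ''}
    # Right-hand keys win, exactly like copy() followed by update().
    assert {**base, **override} == {'sep': '-', 'end': ''}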

View file

@ -283,7 +283,7 @@ def read(self, size=-1):
def read1(self, size=-1):
"""Implements BufferedIOBase.read1()
Reads up to a buffer's worth of data is size is negative."""
Reads up to a buffer's worth of data if size is negative."""
self._check_not_closed()
if self.mode != READ:
import errno

View file

@ -105,9 +105,6 @@
# Mapping status codes to official W3C names
responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576
# maximal line length when calling readline().
_MAXLINE = 65536
_MAXHEADERS = 100
@ -140,6 +137,16 @@
_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
# These characters are not allowed within HTTP URL paths.
# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
# Prevents CVE-2019-9740. Includes control characters such as \r\n.
# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
# Arguably only these _should_ be allowed:
# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
# We are more lenient for assumed real world compatibility purposes.
# We always set the Content-Length header for these methods because some
# servers will otherwise respond with a 411
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@ -320,8 +327,8 @@ def begin(self):
self.headers = self.msg = parse_headers(self.fp)
if self.debuglevel > 0:
for hdr in self.headers:
print("header:", hdr + ":", self.headers.get(hdr))
for hdr, val in self.headers.items():
print("header:", hdr + ":", val)
# are we using the chunked-style of transfer encoding?
tr_enc = self.headers.get("transfer-encoding")
@ -592,43 +599,24 @@ def _readinto_chunked(self, b):
raise IncompleteRead(bytes(b[0:total_bytes]))
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
Normally, we have a blocking socket, but a read() can be interrupted
by a signal (resulting in a partial read).
Note that we cannot distinguish between EOF and an interrupt when zero
bytes have been read. IncompleteRead() will be raised in this
situation.
"""Read the number of bytes requested.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
s = []
while amt > 0:
chunk = self.fp.read(min(amt, MAXAMOUNT))
if not chunk:
raise IncompleteRead(b''.join(s), amt)
s.append(chunk)
amt -= len(chunk)
return b"".join(s)
data = self.fp.read(amt)
if len(data) < amt:
raise IncompleteRead(data, amt-len(data))
return data
def _safe_readinto(self, b):
"""Same as _safe_read, but for reading into a buffer."""
total_bytes = 0
mvb = memoryview(b)
while total_bytes < len(b):
if MAXAMOUNT < len(mvb):
temp_mvb = mvb[0:MAXAMOUNT]
n = self.fp.readinto(temp_mvb)
else:
n = self.fp.readinto(mvb)
if not n:
raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
mvb = mvb[n:]
total_bytes += n
return total_bytes
amt = len(b)
n = self.fp.readinto(b)
if n < amt:
raise IncompleteRead(bytes(b[:n]), amt-n)
return n
def read1(self, n=-1):
"""Read with at most one underlying system call. If at least one
@ -1101,6 +1089,10 @@ def putrequest(self, method, url, skip_host=False,
self._method = method
if not url:
url = '/'
# Prevent CVE-2019-9740.
if match := _contains_disallowed_url_pchar_re.search(url):
raise InvalidURL(f"URL can't contain control characters. {url!r} "
f"(found at least {match.group()!r})")
request = '%s %s %s' % (method, url, self._http_vsn_str)
# Non-ASCII characters should have been eliminated earlier
@ -1427,8 +1419,7 @@ def __repr__(self):
e = ''
return '%s(%i bytes read%s)' % (self.__class__.__name__,
len(self.partial), e)
def __str__(self):
return repr(self)
__str__ = object.__str__
class ImproperConnectionState(HTTPException):
pass
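The effect of the new putrequest() guard, as a hedged example (no bytes hit the wire; putrequest() only buffers the request line, and the check fires first):

    from http.client import HTTPConnection, InvalidURL

    conn = HTTPConnection('example.com')
    try:
        conn.putrequest('GET', '/x\r\nHost: evil')   # header-injection attempt
    except InvalidURL as exc:
        print(exc)   # URL can't contain control characters. ...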

View file

@ -3,6 +3,15 @@ Released on 2019-10-20?
======================================
bpo-36429: Fix starting IDLE with pyshell.
Add idlelib.pyshell alias at top; remove pyshell alias at bottom.
Remove obsolete __name__=='__main__' command.
bpo-30348: Increase test coverage of idlelib.autocomplete by 30%.
Patch by Louie Lu.
bpo-23205: Add tests and refactor grep's findfiles.
bpo-36405: Use dict unpacking in idlelib.
bpo-36396: Remove fgBg param of idlelib.config.GetHighlight().

View file

@ -2225,7 +2225,7 @@ def detach(self):
'General': '''
General:
AutoComplete: Popupwait is milleseconds to wait after key char, without
AutoComplete: Popupwait is milliseconds to wait after key char, without
cursor movement, before popping up completion box. Key char is '.' after
identifier or a '/' (or '\\' on Windows) within a string.

View file

@ -6,7 +6,7 @@
<head>
<meta http-equiv="X-UA-Compatible" content="IE=Edge" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>IDLE &#8212; Python 3.8.0a1 documentation</title>
<title>IDLE &#8212; Python 3.8.0a3 documentation</title>
<link rel="stylesheet" href="../_static/pydoctheme.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
@ -19,7 +19,7 @@
<script type="text/javascript" src="../_static/sidebar.js"></script>
<link rel="search" type="application/opensearchdescription+xml"
title="Search within Python 3.8.0a1 documentation"
title="Search within Python 3.8.0a3 documentation"
href="../_static/opensearch.xml"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
@ -72,7 +72,7 @@
<li>
<a href="../index.html">3.8.0a1 Documentation</a> &#187;
<a href="../index.html">3.8.0a3 Documentation</a> &#187;
</li>
<li class="nav-item nav-item-1"><a href="index.html" >The Python Standard Library</a> &#187;</li>
@ -372,8 +372,8 @@ debugger. Breakpoints for a file are saved in the user's .idlerc directory.</
<dt>Go to file/line</dt>
<dd>Same as in Debug menu.</dd>
</dl>
<p>The Shell window also has an output squeezing facility explained in the
the <em>Python Shell window</em> subsection below.</p>
<p>The Shell window also has an output squeezing facility explained in the <em>Python
Shell window</em> subsection below.</p>
<dl class="docutils">
<dt>Squeeze</dt>
<dd>If the cursor is over an output line, squeeze all the output between
@ -673,24 +673,22 @@ output to Shell will eventually fill memory, resulting in a memory error.
In contrast, some system text windows only keep the last n lines of output.
A Windows console, for instance, keeps a user-settable 1 to 9999 lines,
with 300 the default.</p>
<p>A Tk Text widget, and hence IDLE's Shell, displays characters (codepoints)
in the the BMP (Basic Multilingual Plane) subset of Unicode.
Which characters are displayed with a proper glyph and which with a
replacement box depends on the operating system and installed fonts.
Tab characters cause the following text to begin after
the next tab stop. (They occur every 8 characters).
Newline characters cause following text to appear on a new line.
Other control characters are ignored or displayed as a space, box, or
something else, depending on the operating system and font.
(Moving the text cursor through such output with arrow keys may exhibit
some surprising spacing behavior.)</p>
<div class="highlight-none notranslate"><div class="highlight"><pre><span></span>&gt;&gt;&gt; s = &#39;a\tb\a&lt;\x02&gt;&lt;\r&gt;\bc\nd&#39;
&gt;&gt;&gt; len(s)
14
&gt;&gt;&gt; s # Display repr(s)
&#39;a\tb\x07&lt;\x02&gt;&lt;\r&gt;\x08c\nd&#39;
&gt;&gt;&gt; print(s, end=&#39;&#39;) # Display s as is.
# Result varies by OS and font. Try it.
<p>A Tk Text widget, and hence IDLE's Shell, displays characters (codepoints) in
the BMP (Basic Multilingual Plane) subset of Unicode. Which characters are
displayed with a proper glyph and which with a replacement box depends on the
operating system and installed fonts. Tab characters cause the following text
to begin after the next tab stop. (They occur every 8 characters). Newline
characters cause following text to appear on a new line. Other control
characters are ignored or displayed as a space, box, or something else,
depending on the operating system and font. (Moving the text cursor through
such output with arrow keys may exhibit some surprising spacing behavior.)</p>
<div class="highlight-python3 notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="n">s</span> <span class="o">=</span> <span class="s1">&#39;a</span><span class="se">\t</span><span class="s1">b</span><span class="se">\a</span><span class="s1">&lt;</span><span class="se">\x02</span><span class="s1">&gt;&lt;</span><span class="se">\r</span><span class="s1">&gt;</span><span class="se">\b</span><span class="s1">c</span><span class="se">\n</span><span class="s1">d&#39;</span> <span class="c1"># Enter 22 chars.</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">len</span><span class="p">(</span><span class="n">s</span><span class="p">)</span>
<span class="go">14</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">s</span> <span class="c1"># Display repr(s)</span>
<span class="go">&#39;a\tb\x07&lt;\x02&gt;&lt;\r&gt;\x08c\nd&#39;</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">s</span><span class="p">,</span> <span class="n">end</span><span class="o">=</span><span class="s1">&#39;&#39;</span><span class="p">)</span> <span class="c1"># Display s as is.</span>
<span class="go"># Result varies by OS and font. Try it.</span>
</pre></div>
</div>
<p>The <code class="docutils literal notranslate"><span class="pre">repr</span></code> function is used for interactive echo of expression
@ -723,7 +721,7 @@ facilitate development of tkinter programs. Enter <code class="docutils literal
<span class="pre">root</span> <span class="pre">=</span> <span class="pre">tk.Tk()</span></code> in standard Python and nothing appears. Enter the same
in IDLE and a tk window appears. In standard Python, one must also enter
<code class="docutils literal notranslate"><span class="pre">root.update()</span></code> to see the window. IDLE does the equivalent in the
background, about 20 times a second, which is about every 50 milleseconds.
background, about 20 times a second, which is about every 50 milliseconds.
Next enter <code class="docutils literal notranslate"><span class="pre">b</span> <span class="pre">=</span> <span class="pre">tk.Button(root,</span> <span class="pre">text='button');</span> <span class="pre">b.pack()</span></code>. Again,
nothing visibly changes in standard Python until one enters <code class="docutils literal notranslate"><span class="pre">root.update()</span></code>.</p>
<p>Most tkinter programs run <code class="docutils literal notranslate"><span class="pre">root.mainloop()</span></code>, which usually does not
@ -912,7 +910,7 @@ also used for testing.</p>
<li>
<a href="../index.html">3.8.0a1 Documentation</a> &#187;
<a href="../index.html">3.8.0a3 Documentation</a> &#187;
</li>
<li class="nav-item nav-item-1"><a href="index.html" >The Python Standard Library</a> &#187;</li>
@ -943,7 +941,7 @@ also used for testing.</p>
<br />
<br />
Last updated on Feb 23, 2019.
Last updated on Apr 26, 2019.
<a href="https://docs.python.org/3/bugs.html">Found a bug</a>?
<br />

View file

@ -2,7 +2,7 @@
Contents are subject to revision at any time, without notice.
Help => About IDLE: diplay About Idle dialog
Help => About IDLE: display About Idle dialog
<to be moved here from help_about.py>

View file

@ -521,7 +521,7 @@ def test_get_current_keyset(self):
def test_get_keyset(self):
conf = self.mock_config()
# Conflic with key set, should be disable to ''
# Conflict with key set, should be disabled to ''
conf.defaultCfg['extensions'].add_section('Foobar')
conf.defaultCfg['extensions'].add_section('Foobar_cfgBindings')
conf.defaultCfg['extensions'].set('Foobar', 'enable', 'True')

View file

@ -1,6 +1,8 @@
#! /usr/bin/env python3
import sys
if __name__ == "__main__":
sys.modules['idlelib.pyshell'] = sys.modules['__main__']
try:
from tkinter import *
@ -416,10 +418,7 @@ def build_subprocess_arglist(self):
# run from the IDLE source directory.
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
default=False, type='bool')
if __name__ == 'idlelib.pyshell':
command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
else:
command = "__import__('run').main(%r)" % (del_exitf,)
command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
return [sys.executable] + w + ["-c", command, str(self.port)]
def start_subprocess(self):
@ -1574,7 +1573,6 @@ def main():
capture_warnings(False)
if __name__ == "__main__":
sys.modules['pyshell'] = sys.modules['__main__']
main()
capture_warnings(False) # Make sure turned off; see issue 18081

View file

@ -64,8 +64,7 @@ def dumps(obj, protocol=None):
class CodePickler(pickle.Pickler):
dispatch_table = {types.CodeType: pickle_code}
dispatch_table.update(copyreg.dispatch_table)
dispatch_table = {types.CodeType: pickle_code, **copyreg.dispatch_table}
BUFSIZE = 8*1024

View file

@ -272,6 +272,9 @@ def __enter__(self):
return self
def __exit__(self, *args):
if self.state == "LOGOUT":
return
try:
self.logout()
except OSError:
@ -625,11 +628,8 @@ def logout(self):
Returns server 'BYE' response.
"""
self.state = 'LOGOUT'
try: typ, dat = self._simple_command('LOGOUT')
except: typ, dat = 'NO', ['%s: %s' % sys.exc_info()[:2]]
typ, dat = self._simple_command('LOGOUT')
self.shutdown()
if 'BYE' in self.untagged_responses:
return 'BYE', self.untagged_responses['BYE']
return typ, dat
@ -1012,16 +1012,17 @@ def _command(self, name, *args):
def _command_complete(self, name, tag):
logout = (name == 'LOGOUT')
# BYE is expected after LOGOUT
if name != 'LOGOUT':
if not logout:
self._check_bye()
try:
typ, data = self._get_tagged_response(tag)
typ, data = self._get_tagged_response(tag, expect_bye=logout)
except self.abort as val:
raise self.abort('command: %s => %s' % (name, val))
except self.error as val:
raise self.error('command: %s => %s' % (name, val))
if name != 'LOGOUT':
if not logout:
self._check_bye()
if typ == 'BAD':
raise self.error('%s command error: %s %s' % (name, typ, data))
@ -1117,7 +1118,7 @@ def _get_response(self):
return resp
def _get_tagged_response(self, tag):
def _get_tagged_response(self, tag, expect_bye=False):
while 1:
result = self.tagged_commands[tag]
@ -1125,9 +1126,15 @@ def _get_tagged_response(self, tag):
del self.tagged_commands[tag]
return result
if expect_bye:
typ = 'BYE'
bye = self.untagged_responses.pop(typ, None)
if bye is not None:
# Server replies to the "LOGOUT" command with "BYE"
return (typ, bye)
# If we've seen a BYE at this point, the socket will be
# closed, so report the BYE now.
self._check_bye()
# Some have reported "unexpected response" exceptions.

View file

@ -265,6 +265,7 @@ def _write_atomic(path, data, mode=0o666):
# this might affected the first line number #32911)
# Python 3.8a1 3400 (move frame block handling to compiler #17611)
# Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
#
# MAGIC must change whenever the bytecode emitted by the compiler may no
# longer be understood by older implementations of the eval loop (usually
@ -273,7 +274,7 @@ def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
MAGIC_NUMBER = (3401).to_bytes(2, 'little') + b'\r\n'
MAGIC_NUMBER = (3410).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
_PYCACHE = '__pycache__'
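The magic number is the 16-bit word in little-endian order followed by b'\r\n', so the PEP 570 bump is easy to check by hand:

    # 3410 == 0x0D52; little-endian gives b'R\r', then b'\r\n' is appended.
    assert (3410).to_bytes(2, 'little') + b'\r\n' == b'R\r\r\n'
    # After this change, importlib.util.MAGIC_NUMBER == b'R\r\r\n'.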

View file

@ -168,23 +168,30 @@ def isfunction(object):
__kwdefaults__ dict of keyword only parameters with defaults"""
return isinstance(object, types.FunctionType)
def _has_code_flag(f, flag):
"""Return true if ``f`` is a function (or a method or functools.partial
wrapper wrapping a function) whose code object has the given ``flag``
set in its flags."""
while ismethod(f):
f = f.__func__
f = functools._unwrap_partial(f)
if not isfunction(f):
return False
return bool(f.__code__.co_flags & flag)
def isgeneratorfunction(obj):
"""Return true if the object is a user-defined generator function.
Generator function objects provide the same attributes as functions.
See help(isfunction) for a list of attributes."""
obj = functools._unwrap_partial(obj)
return bool((isfunction(obj) or ismethod(obj)) and
obj.__code__.co_flags & CO_GENERATOR)
return _has_code_flag(obj, CO_GENERATOR)
def iscoroutinefunction(obj):
"""Return true if the object is a coroutine function.
Coroutine functions are defined with "async def" syntax.
"""
obj = functools._unwrap_partial(obj)
return bool(((isfunction(obj) or ismethod(obj)) and
obj.__code__.co_flags & CO_COROUTINE))
return _has_code_flag(obj, CO_COROUTINE)
def isasyncgenfunction(obj):
"""Return true if the object is an asynchronous generator function.
@ -192,9 +199,7 @@ def isasyncgenfunction(obj):
Asynchronous generator functions are defined with "async def"
syntax and have "yield" expressions in their body.
"""
obj = functools._unwrap_partial(obj)
return bool((isfunction(obj) or ismethod(obj)) and
obj.__code__.co_flags & CO_ASYNC_GENERATOR)
return _has_code_flag(obj, CO_ASYNC_GENERATOR)
def isasyncgen(object):
"""Return true if the object is an asynchronous generator."""
@ -267,6 +272,7 @@ def iscode(object):
| 16=nested | 32=generator | 64=nofree | 128=coroutine
| 256=iterable_coroutine | 512=async_generator
co_freevars tuple of names of free variables
co_posonlyargcount number of positional only arguments
co_kwonlyargcount number of keyword only arguments (not including ** arg)
co_lnotab encoded mapping of line numbers to bytecode indices
co_name name with which this code object was defined
@ -582,9 +588,12 @@ def _finddoc(obj):
cls = obj.__objclass__
if getattr(cls, name) is not obj:
return None
if ismemberdescriptor(obj):
slots = getattr(cls, '__slots__', None)
if isinstance(slots, dict) and name in slots:
return slots[name]
else:
return None
for base in cls.__mro__:
try:
doc = getattr(base, name).__doc__
@ -1023,26 +1032,20 @@ def getargs(co):
'args' is the list of argument names. Keyword-only arguments are
appended. 'varargs' and 'varkw' are the names of the * and **
arguments or None."""
args, varargs, kwonlyargs, varkw = _getfullargs(co)
return Arguments(args + kwonlyargs, varargs, varkw)
def _getfullargs(co):
"""Get information about the arguments accepted by a code object.
Four things are returned: (args, varargs, kwonlyargs, varkw), where
'args' and 'kwonlyargs' are lists of argument names, and 'varargs'
and 'varkw' are the names of the * and ** arguments or None."""
if not iscode(co):
raise TypeError('{!r} is not a code object'.format(co))
nargs = co.co_argcount
names = co.co_varnames
nargs = co.co_argcount
nposonlyargs = co.co_posonlyargcount
nkwargs = co.co_kwonlyargcount
args = list(names[:nargs])
kwonlyargs = list(names[nargs:nargs+nkwargs])
nposargs = nargs + nposonlyargs
posonlyargs = list(names[:nposonlyargs])
args = list(names[nposonlyargs:nposonlyargs+nargs])
kwonlyargs = list(names[nposargs:nposargs+nkwargs])
step = 0
nargs += nposonlyargs
nargs += nkwargs
varargs = None
if co.co_flags & CO_VARARGS:
@ -1051,8 +1054,7 @@ def _getfullargs(co):
varkw = None
if co.co_flags & CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
return args, varargs, kwonlyargs, varkw
return Arguments(posonlyargs + args + kwonlyargs, varargs, varkw)
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
@ -1083,7 +1085,7 @@ def getargspec(func):
getfullargspec(func)
if kwonlyargs or ann:
raise ValueError("Function has keyword-only parameters or annotations"
", use getfullargspec() API which can support them")
", use inspect.signature() API which can support them")
return ArgSpec(args, varargs, varkw, defaults)
FullArgSpec = namedtuple('FullArgSpec',
@ -1101,11 +1103,16 @@ def getfullargspec(func):
'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults.
'annotations' is a dictionary mapping parameter names to annotations.
.. deprecated:: 3.8
Use inspect.signature() instead of inspect.getfullargspec().
Notable differences from inspect.signature():
- the "self" parameter is always reported, even for bound methods
- wrapper chains defined by __wrapped__ *not* unwrapped automatically
"""
warnings.warn("Use inspect.signature() instead of inspect.getfullargspec()",
DeprecationWarning, stacklevel=2)
try:
# Re: `skip_bound_arg=False`
#
@ -1137,6 +1144,7 @@ def getfullargspec(func):
args = []
varargs = None
varkw = None
posonlyargs = []
kwonlyargs = []
defaults = ()
annotations = {}
@ -1151,7 +1159,9 @@ def getfullargspec(func):
name = param.name
if kind is _POSITIONAL_ONLY:
args.append(name)
posonlyargs.append(name)
if param.default is not param.empty:
defaults += (param.default,)
elif kind is _POSITIONAL_OR_KEYWORD:
args.append(name)
if param.default is not param.empty:
@ -1176,7 +1186,7 @@ def getfullargspec(func):
# compatibility with 'func.__defaults__'
defaults = None
return FullArgSpec(args, varargs, varkw, defaults,
return FullArgSpec(posonlyargs + args, varargs, varkw, defaults,
kwonlyargs, kwdefaults, annotations)
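With PEP 570 in place, positional-only names are reported at the front of FullArgSpec.args rather than being dropped. A quick check (the DeprecationWarning added above is silenced for the demonstration):

    import inspect
    import warnings

    def f(a, b, /, c, *, d=0):
        pass

    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        spec = inspect.getfullargspec(f)
    assert spec.args == ['a', 'b', 'c']      # posonly names come first
    assert spec.kwonlyargs == ['d']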
@ -2111,7 +2121,7 @@ def _signature_from_builtin(cls, func, skip_bound_arg=True):
return _signature_fromstr(cls, func, s, skip_bound_arg)
def _signature_from_function(cls, func):
def _signature_from_function(cls, func, skip_bound_arg=True):
"""Private helper: constructs Signature for the given python function."""
is_duck_function = False
@ -2123,15 +2133,22 @@ def _signature_from_function(cls, func):
# of pure function:
raise TypeError('{!r} is not a Python function'.format(func))
s = getattr(func, "__text_signature__", None)
if s:
return _signature_fromstr(cls, func, s, skip_bound_arg)
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
posonly_count = func_code.co_posonlyargcount
positional_count = posonly_count + pos_count
positional_only = tuple(arg_names[:posonly_count])
positional = tuple(arg_names[posonly_count:positional_count])
keyword_only_count = func_code.co_kwonlyargcount
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
keyword_only = arg_names[positional_count:(positional_count + keyword_only_count)]
annotations = func.__annotations__
defaults = func.__defaults__
kwdefaults = func.__kwdefaults__
@ -2143,23 +2160,33 @@ def _signature_from_function(cls, func):
parameters = []
non_default_count = positional_count - pos_default_count
all_positional = positional_only + positional
posonly_left = posonly_count
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
for name in all_positional[:non_default_count]:
kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
kind=kind))
if posonly_left:
posonly_left -= 1
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
for offset, name in enumerate(all_positional[non_default_count:]):
kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
kind=kind,
default=defaults[offset]))
if posonly_left:
posonly_left -= 1
# *args
if func_code.co_flags & CO_VARARGS:
name = arg_names[pos_count + keyword_only_count]
name = arg_names[positional_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
@ -2176,7 +2203,7 @@ def _signature_from_function(cls, func):
default=default))
# **kwargs
if func_code.co_flags & CO_VARKEYWORDS:
index = pos_count + keyword_only_count
index = positional_count + keyword_only_count
if func_code.co_flags & CO_VARARGS:
index += 1
@ -2278,7 +2305,8 @@ def _signature_from_callable(obj, *,
if isfunction(obj) or _signature_is_functionlike(obj):
# If it's a pure Python function, or an object that is duck type
# of a Python function (Cython functions, for instance), then:
return _signature_from_function(sigcls, obj)
return _signature_from_function(sigcls, obj,
skip_bound_arg=skip_bound_arg)
if _signature_is_builtin(obj):
return _signature_from_builtin(sigcls, obj,

View file

@ -532,6 +532,30 @@ def _prefix_from_ip_string(cls, ip_str):
except ValueError:
cls._report_invalid_netmask(ip_str)
@classmethod
def _split_addr_prefix(cls, address):
"""Helper function to parse address of Network/Interface.
Arg:
address: Argument of Network/Interface.
Returns:
(addr, prefix) tuple.
"""
# a packed address or integer
if isinstance(address, (bytes, int)):
return address, cls._max_prefixlen
if not isinstance(address, tuple):
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
address = _split_optional_netmask(address)
# Constructing from a tuple (addr, [mask])
if len(address) > 1:
return address
return address[0], cls._max_prefixlen
def __reduce__(self):
return self.__class__, (str(self),)
@ -597,15 +621,11 @@ def __reduce__(self):
@functools.total_ordering
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
This IP class contains the version independent methods which are
used by networks.
"""
def __init__(self, address):
self._cache = {}
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, str(self))
@ -677,8 +697,7 @@ def __contains__(self, other):
# dealing with another address
else:
# address
return (int(self.network_address) <= int(other._ip) <=
int(self.broadcast_address))
return other._ip & self.netmask._ip == self.network_address._ip
def overlaps(self, other):
"""Tell if self is partly contained in other."""
@ -687,22 +706,14 @@ def overlaps(self, other):
other.network_address in self or (
other.broadcast_address in self)))
@property
@functools.cached_property
def broadcast_address(self):
x = self._cache.get('broadcast_address')
if x is None:
x = self._address_class(int(self.network_address) |
int(self.hostmask))
self._cache['broadcast_address'] = x
return x
return self._address_class(int(self.network_address) |
int(self.hostmask))
@property
@functools.cached_property
def hostmask(self):
x = self._cache.get('hostmask')
if x is None:
x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
self._cache['hostmask'] = x
return x
return self._address_class(int(self.netmask) ^ self._ALL_ONES)
@property
def with_prefixlen(self):
@ -1077,9 +1088,6 @@ class _BaseV4:
# Equivalent to 255.255.255.255 or 32 bits of 1's.
_ALL_ONES = (2**IPV4LENGTH) - 1
# the valid octets for host and netmasks. only useful for IPv4.
_valid_mask_octets = frozenset({255, 254, 252, 248, 240, 224, 192, 128, 0})
_max_prefixlen = IPV4LENGTH
# There are only a handful of valid v4 netmasks, so we cache them all
# when constructed (see _make_netmask()).
@ -1165,12 +1173,6 @@ def _parse_octet(cls, octet_str):
raise ValueError(msg % octet_str)
# Convert to integer (we know digits are legal)
octet_int = int(octet_str, 10)
# Any octets that look like they *might* be written in octal,
# and which don't look exactly the same in both octal and
# decimal are rejected as ambiguous
if octet_int > 7 and octet_str[0] == '0':
msg = "Ambiguous (octal/decimal) value in %r not permitted"
raise ValueError(msg % octet_str)
if octet_int > 255:
raise ValueError("Octet %d (> 255) not permitted" % octet_int)
return octet_int
@ -1188,58 +1190,6 @@ def _string_from_ip_int(cls, ip_int):
"""
return '.'.join(map(str, ip_int.to_bytes(4, 'big')))
def _is_valid_netmask(self, netmask):
"""Verify that the netmask is valid.
Args:
netmask: A string, either a prefix or dotted decimal
netmask.
Returns:
A boolean, True if the prefix represents a valid IPv4
netmask.
"""
mask = netmask.split('.')
if len(mask) == 4:
try:
for x in mask:
if int(x) not in self._valid_mask_octets:
return False
except ValueError:
# Found something that isn't an integer or isn't valid
return False
for idx, y in enumerate(mask):
if idx > 0 and y > mask[idx - 1]:
return False
return True
try:
netmask = int(netmask)
except ValueError:
return False
return 0 <= netmask <= self._max_prefixlen
def _is_hostmask(self, ip_str):
"""Test if the IP string is a hostmask (rather than a netmask).
Args:
ip_str: A string, the potential hostmask.
Returns:
A boolean, True if the IP string is a hostmask.
"""
bits = ip_str.split('.')
try:
parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
except ValueError:
return False
if len(parts) != len(bits):
return False
if parts[0] < parts[-1]:
return True
return False
def _reverse_pointer(self):
"""Return the reverse DNS pointer name for the IPv4 address.
@ -1378,36 +1328,20 @@ def is_link_local(self):
class IPv4Interface(IPv4Address):
def __init__(self, address):
if isinstance(address, (bytes, int)):
IPv4Address.__init__(self, address)
self.network = IPv4Network(self._ip)
self._prefixlen = self._max_prefixlen
return
addr, mask = self._split_addr_prefix(address)
if isinstance(address, tuple):
IPv4Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv4Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr = _split_optional_netmask(address)
IPv4Address.__init__(self, addr[0])
self.network = IPv4Network(address, strict=False)
IPv4Address.__init__(self, addr)
self.network = IPv4Network((addr, mask), strict=False)
self.netmask = self.network.netmask
self._prefixlen = self.network._prefixlen
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
@functools.cached_property
def hostmask(self):
return self.network.hostmask
def __str__(self):
return '%s/%d' % (self._string_from_ip_int(self._ip),
self.network.prefixlen)
self._prefixlen)
def __eq__(self, other):
address_equal = IPv4Address.__eq__(self, other)
@ -1474,7 +1408,6 @@ class IPv4Network(_BaseV4, _BaseNetwork):
_address_class = IPv4Address
def __init__(self, address, strict=True):
"""Instantiate a new IPv4 network object.
Args:
@ -1508,24 +1441,8 @@ def __init__(self, address, strict=True):
an IPv4 address.
ValueError: If strict is True and a network address is not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Constructing from a packed address or integer
if isinstance(address, (int, bytes)):
addr = address
mask = self._max_prefixlen
# Constructing from a tuple (addr, [mask])
elif isinstance(address, tuple):
addr = address[0]
mask = address[1] if len(address) > 1 else self._max_prefixlen
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
else:
args = _split_optional_netmask(address)
addr = self._ip_int_from_string(args[0])
mask = args[1] if len(args) == 2 else self._max_prefixlen
addr, mask = self._split_addr_prefix(address)
self.network_address = IPv4Address(addr)
self.netmask, self._prefixlen = self._make_netmask(mask)
@ -2056,32 +1973,20 @@ def sixtofour(self):
class IPv6Interface(IPv6Address):
def __init__(self, address):
if isinstance(address, (bytes, int)):
IPv6Address.__init__(self, address)
self.network = IPv6Network(self._ip)
self._prefixlen = self._max_prefixlen
return
if isinstance(address, tuple):
IPv6Address.__init__(self, address[0])
if len(address) > 1:
self._prefixlen = int(address[1])
else:
self._prefixlen = self._max_prefixlen
self.network = IPv6Network(address, strict=False)
self.netmask = self.network.netmask
self.hostmask = self.network.hostmask
return
addr, mask = self._split_addr_prefix(address)
addr = _split_optional_netmask(address)
IPv6Address.__init__(self, addr[0])
self.network = IPv6Network(address, strict=False)
IPv6Address.__init__(self, addr)
self.network = IPv6Network((addr, mask), strict=False)
self.netmask = self.network.netmask
self._prefixlen = self.network._prefixlen
self.hostmask = self.network.hostmask
@functools.cached_property
def hostmask(self):
return self.network.hostmask
def __str__(self):
return '%s/%d' % (self._string_from_ip_int(self._ip),
self.network.prefixlen)
self._prefixlen)
def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)
@ -2186,24 +2091,8 @@ def __init__(self, address, strict=True):
an IPv6 address.
ValueError: If strict was True and a network address was not
supplied.
"""
_BaseNetwork.__init__(self, address)
# Constructing from a packed address or integer
if isinstance(address, (int, bytes)):
addr = address
mask = self._max_prefixlen
# Constructing from a tuple (addr, [mask])
elif isinstance(address, tuple):
addr = address[0]
mask = address[1] if len(address) > 1 else self._max_prefixlen
# Assume input argument to be string or any object representation
# which converts into a formatted IP prefix string.
else:
args = _split_optional_netmask(address)
addr = self._ip_int_from_string(args[0])
mask = args[1] if len(args) == 2 else self._max_prefixlen
addr, mask = self._split_addr_prefix(address)
self.network_address = IPv6Address(addr)
self.netmask, self._prefixlen = self._make_netmask(mask)
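_split_addr_prefix gives the four constructors one parsing path for every accepted input shape; the public behaviour is unchanged. For example:

    import ipaddress

    a = ipaddress.IPv4Interface('192.0.2.5/24')      # string with prefix
    b = ipaddress.IPv4Interface(('192.0.2.5', 24))   # (addr, mask) tuple
    c = ipaddress.IPv4Interface(3221225989)          # integer; /32 implied
    assert a == b
    assert str(c) == '192.0.2.5/32'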

View file

@ -296,7 +296,7 @@ def load(fp, *, cls=None, object_hook=None, parse_float=None,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None,
def loads(s, *, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
containing a JSON document) to a Python object.
@ -330,7 +330,7 @@ def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None,
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``JSONDecoder`` is used.
The ``encoding`` argument is ignored and deprecated.
The ``encoding`` argument is ignored and deprecated since Python 3.1.
"""
if isinstance(s, str):
if s.startswith('\ufeff'):
@ -342,6 +342,15 @@ def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None,
f'not {s.__class__.__name__}')
s = s.decode(detect_encoding(s), 'surrogatepass')
if "encoding" in kw:
import warnings
warnings.warn(
"'encoding' is ignored and deprecated. It will be removed in Python 3.9",
DeprecationWarning,
stacklevel=2
)
del kw['encoding']
if (cls is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None and not kw):
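After this change the deprecated keyword is still accepted, but it warns and is dropped before reaching the decoder:

    import json
    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        json.loads('{"a": 1}', encoding='utf-8')     # ignored since Python 3.1
    assert caught[0].category is DeprecationWarning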

View file

@ -268,7 +268,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
list=list,
str=str,
tuple=tuple,
_intstr=int.__str__,
_intstr=int.__repr__,
):
if _indent is not None and not isinstance(_indent, str):
@ -307,7 +307,7 @@ def _iterencode_list(lst, _current_indent_level):
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
# Subclasses of int/float may override __str__, but we still
# Subclasses of int/float may override __repr__, but we still
# want to encode them as integers/floats in JSON. One example
# within the standard library is IntEnum.
yield buf + _intstr(value)
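Binding _intstr to int.__repr__ keeps int subclasses that customize their string form, IntEnum in particular, encoding as plain numbers:

    import json
    from enum import IntEnum

    class Color(IntEnum):
        RED = 1

    assert str(Color.RED) == 'Color.RED'   # customized __str__
    assert json.dumps(Color.RED) == '1'    # still serialized as an integer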

Lib/keyword.py: 129 changes (Executable file → Normal file)
View file

@ -1,98 +1,55 @@
#! /usr/bin/env python3
"""Keywords (from "graminit.c")
"""Keywords (from "Grammar/Grammar")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
the python source tree after building the interpreter and run:
the python source tree and run:
./python Lib/keyword.py
python3 -m Parser.pgen.keywordgen Grammar/Grammar \
Grammar/Tokens \
Lib/keyword.py
Alternatively, you can run 'make regen-keyword'.
"""
__all__ = ["iskeyword", "kwlist"]
kwlist = [
#--start keywords--
'False',
'None',
'True',
'and',
'as',
'assert',
'break',
'class',
'continue',
'def',
'del',
'elif',
'else',
'except',
'finally',
'for',
'from',
'global',
'if',
'import',
'in',
'is',
'lambda',
'nonlocal',
'not',
'or',
'pass',
'raise',
'return',
'try',
'while',
'with',
'yield',
#--end keywords--
]
kwlist.append('async')
kwlist.append('await')
kwlist.sort()
'False',
'None',
'True',
'and',
'as',
'assert',
'async',
'await',
'break',
'class',
'continue',
'def',
'del',
'elif',
'else',
'except',
'finally',
'for',
'from',
'global',
'if',
'import',
'in',
'is',
'lambda',
'nonlocal',
'not',
'or',
'pass',
'raise',
'return',
'try',
'while',
'with',
'yield'
]
iskeyword = frozenset(kwlist).__contains__
def main():
import sys, re
args = sys.argv[1:]
iptfile = args and args[0] or "Python/graminit.c"
if len(args) > 1: optfile = args[1]
else: optfile = "Lib/keyword.py"
# load the output skeleton from the target, taking care to preserve its
# newline convention.
with open(optfile, newline='') as fp:
format = fp.readlines()
nl = format[0][len(format[0].strip()):] if format else '\n'
# scan the source file for keywords
with open(iptfile) as fp:
strprog = re.compile('"([^"]+)"')
lines = []
for line in fp:
if '{1, "' in line:
match = strprog.search(line)
if match:
lines.append(" '" + match.group(1) + "'," + nl)
lines.sort()
# insert the lines of keywords into the skeleton
try:
start = format.index("#--start keywords--" + nl) + 1
end = format.index("#--end keywords--" + nl)
format[start:end] = lines
except ValueError:
sys.stderr.write("target does not contain format markers\n")
sys.exit(1)
# write the output file
with open(optfile, 'w', newline='') as fp:
fp.writelines(format)
if __name__ == "__main__":
main()
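With async and await folded into the generated list, the module is pure data plus one membership test:

    import keyword

    assert keyword.iskeyword('await')       # a hard keyword since 3.7
    assert not keyword.iskeyword('print')   # a builtin, not a keyword
    assert len(keyword.kwlist) == 35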

View file

@ -321,7 +321,7 @@ def untokenize(iterable):
Round-trip invariant for full input:
Untokenized source will match input source exactly
Round-trip invariant for limited intput:
Round-trip invariant for limited input:
# Output text will tokenize the back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)

View file

@ -231,49 +231,38 @@ def _releaseLock():
# Prevent a held logging lock from blocking a child from logging.
if not hasattr(os, 'register_at_fork'): # Windows and friends.
def _register_at_fork_acquire_release(instance):
def _register_at_fork_reinit_lock(instance):
pass # no-op when os.register_at_fork does not exist.
else: # The os.register_at_fork API exists
os.register_at_fork(before=_acquireLock,
after_in_child=_releaseLock,
after_in_parent=_releaseLock)
else:
# A collection of instances with a createLock method (logging.Handler)
# to be called in the child after forking. The weakref avoids us keeping
# discarded Handler instances alive. A set is used to avoid accumulating
# duplicate registrations as createLock() is responsible for registering
# a new Handler instance with this set in the first place.
_at_fork_reinit_lock_weakset = weakref.WeakSet()
# A collection of instances with acquire and release methods (logging.Handler)
# to be called before and after fork. The weakref avoids us keeping discarded
# Handler instances alive forever in case an odd program creates and destroys
# many over its lifetime.
_at_fork_acquire_release_weakset = weakref.WeakSet()
def _register_at_fork_reinit_lock(instance):
_acquireLock()
try:
_at_fork_reinit_lock_weakset.add(instance)
finally:
_releaseLock()
def _register_at_fork_acquire_release(instance):
# We put the instance itself in a single WeakSet as we MUST have only
# one atomic weak ref. used by both before and after atfork calls to
# guarantee matched pairs of acquire and release calls.
_at_fork_acquire_release_weakset.add(instance)
def _at_fork_weak_calls(method_name):
for instance in _at_fork_acquire_release_weakset:
method = getattr(instance, method_name)
def _after_at_fork_child_reinit_locks():
# _acquireLock() was called in the parent before forking.
for handler in _at_fork_reinit_lock_weakset:
try:
method()
handler.createLock()
except Exception as err:
# Similar to what PyErr_WriteUnraisable does.
print("Ignoring exception from logging atfork", instance,
method_name, "method:", err, file=sys.stderr)
"._reinit_lock() method:", err, file=sys.stderr)
_releaseLock() # Acquired by os.register_at_fork(before=.
def _before_at_fork_weak_calls():
_at_fork_weak_calls('acquire')
def _after_at_fork_weak_calls():
_at_fork_weak_calls('release')
os.register_at_fork(before=_before_at_fork_weak_calls,
after_in_child=_after_at_fork_weak_calls,
after_in_parent=_after_at_fork_weak_calls)
os.register_at_fork(before=_acquireLock,
after_in_child=_after_at_fork_child_reinit_locks,
after_in_parent=_releaseLock)
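Handler locks are now simply recreated in the forked child instead of being acquired and released around the fork. The pattern, reduced to a standalone sketch (POSIX only; os.register_at_fork does not exist on Windows):

    import os
    import threading

    lock = threading.RLock()

    def _reinit_in_child():
        global lock
        lock = threading.RLock()   # fresh, unlocked lock for the child

    if hasattr(os, 'register_at_fork'):
        os.register_at_fork(before=lock.acquire,
                            after_in_parent=lock.release,
                            after_in_child=_reinit_in_child)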
#---------------------------------------------------------------------------
@ -364,12 +353,10 @@ def __init__(self, name, level, pathname, lineno,
else:
self.process = None
def __str__(self):
def __repr__(self):
return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
self.pathname, self.lineno, self.msg)
__repr__ = __str__
def getMessage(self):
"""
Return the message for this LogRecord.
@ -902,7 +889,7 @@ def createLock(self):
Acquire a thread lock for serializing access to the underlying I/O.
"""
self.lock = threading.RLock()
_register_at_fork_acquire_release(self)
_register_at_fork_reinit_lock(self)
def acquire(self):
"""
@ -1124,6 +1111,8 @@ def setStream(self, stream):
def __repr__(self):
level = getLevelName(self.level)
name = getattr(self.stream, 'name', '')
# bpo-36015: name can be an int
name = str(name)
if name:
name += ' '
return '<%s %s(%s)>' % (self.__class__.__name__, name, level)

View file

@ -8,9 +8,7 @@
import sys
import types
import warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
import imp
LOAD_CONST = dis.opmap['LOAD_CONST']
IMPORT_NAME = dis.opmap['IMPORT_NAME']
@ -19,6 +17,16 @@
STORE_OPS = STORE_NAME, STORE_GLOBAL
EXTENDED_ARG = dis.EXTENDED_ARG
# Old imp constants:
_SEARCH_ERROR = 0
_PY_SOURCE = 1
_PY_COMPILED = 2
_C_EXTENSION = 3
_PKG_DIRECTORY = 5
_C_BUILTIN = 6
_PY_FROZEN = 7
# Modulefinder does a good job at simulating Python's, but it can not
# handle __path__ modifications packages make at runtime. Therefore there
# is a mechanism whereby you can register extra paths in this map for a
@ -43,6 +51,54 @@ def ReplacePackage(oldname, newname):
replacePackageMap[oldname] = newname
def _find_module(name, path=None):
"""An importlib reimplementation of imp.find_module (for our purposes)."""
# It's necessary to clear the caches for our Finder first, in case any
# modules are being added/deleted/modified at runtime. In particular,
# test_modulefinder.py changes file tree contents in a cache-breaking way:
importlib.machinery.PathFinder.invalidate_caches()
spec = importlib.machinery.PathFinder.find_spec(name, path)
if spec is None:
raise ImportError("No module named {name!r}".format(name=name), name=name)
# Some special cases:
if spec.loader is importlib.machinery.BuiltinImporter:
return None, None, ("", "", _C_BUILTIN)
if spec.loader is importlib.machinery.FrozenImporter:
return None, None, ("", "", _PY_FROZEN)
file_path = spec.origin
if spec.loader.is_package(name):
return None, os.path.dirname(file_path), ("", "", _PKG_DIRECTORY)
if isinstance(spec.loader, importlib.machinery.SourceFileLoader):
kind = _PY_SOURCE
mode = "r"
elif isinstance(spec.loader, importlib.machinery.ExtensionFileLoader):
kind = _C_EXTENSION
mode = "rb"
elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader):
kind = _PY_COMPILED
mode = "rb"
else: # Should never happen.
return None, None, ("", "", _SEARCH_ERROR)
file = open(file_path, mode)
suffix = os.path.splitext(file_path)[-1]
return file, file_path, (suffix, mode, kind)
class Module:
def __init__(self, name, file=None, path=None):
@ -69,7 +125,7 @@ def __repr__(self):
class ModuleFinder:
def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]):
def __init__(self, path=None, debug=0, excludes=None, replace_paths=None):
if path is None:
path = sys.path
self.path = path
@ -77,8 +133,8 @@ def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]):
self.badmodules = {}
self.debug = debug
self.indent = 0
self.excludes = excludes
self.replace_paths = replace_paths
self.excludes = excludes if excludes is not None else []
self.replace_paths = replace_paths if replace_paths is not None else []
self.processed_paths = [] # Used in debugging only
def msg(self, level, str, *args):
@ -105,14 +161,14 @@ def msgout(self, *args):
def run_script(self, pathname):
self.msg(2, "run_script", pathname)
with open(pathname) as fp:
stuff = ("", "r", imp.PY_SOURCE)
stuff = ("", "r", _PY_SOURCE)
self.load_module('__main__', fp, pathname, stuff)
def load_file(self, pathname):
dir, name = os.path.split(pathname)
name, ext = os.path.splitext(name)
with open(pathname) as fp:
stuff = (ext, "r", imp.PY_SOURCE)
stuff = (ext, "r", _PY_SOURCE)
self.load_module(name, fp, pathname, stuff)
def import_hook(self, name, caller=None, fromlist=None, level=-1):
@ -279,13 +335,13 @@ def import_module(self, partname, fqname, parent):
def load_module(self, fqname, fp, pathname, file_info):
suffix, mode, type = file_info
self.msgin(2, "load_module", fqname, fp and "fp", pathname)
if type == imp.PKG_DIRECTORY:
if type == _PKG_DIRECTORY:
m = self.load_package(fqname, pathname)
self.msgout(2, "load_module ->", m)
return m
if type == imp.PY_SOURCE:
if type == _PY_SOURCE:
co = compile(fp.read()+'\n', pathname, 'exec')
elif type == imp.PY_COMPILED:
elif type == _PY_COMPILED:
try:
data = fp.read()
importlib._bootstrap_external._classify_pyc(data, fqname, {})
@ -323,17 +379,20 @@ def _safe_import_hook(self, name, caller, fromlist, level=-1):
except ImportError as msg:
self.msg(2, "ImportError:", str(msg))
self._add_badmodule(name, caller)
except SyntaxError as msg:
self.msg(2, "SyntaxError:", str(msg))
self._add_badmodule(name, caller)
else:
if fromlist:
for sub in fromlist:
if sub in self.badmodules:
self._add_badmodule(sub, caller)
fullname = name + "." + sub
if fullname in self.badmodules:
self._add_badmodule(fullname, caller)
continue
try:
self.import_hook(name, caller, [sub], level=level)
except ImportError as msg:
self.msg(2, "ImportError:", str(msg))
fullname = name + "." + sub
self._add_badmodule(fullname, caller)
def scan_opcodes(self, co):
@ -445,10 +504,11 @@ def find_module(self, name, path, parent=None):
if path is None:
if name in sys.builtin_module_names:
return (None, None, ("", "", imp.C_BUILTIN))
return (None, None, ("", "", _C_BUILTIN))
path = self.path
return imp.find_module(name, path)
return _find_module(name, path)
def report(self):
"""Print a report to stdout, listing the found modules with their
@ -559,8 +619,9 @@ def replace_paths_in_code(self, co):
if isinstance(consts[i], type(co)):
consts[i] = self.replace_paths_in_code(consts[i])
return types.CodeType(co.co_argcount, co.co_kwonlyargcount,
co.co_nlocals, co.co_stacksize, co.co_flags,
return types.CodeType(co.co_argcount, co.co_posonlyargcount,
co.co_kwonlyargcount, co.co_nlocals,
co.co_stacksize, co.co_flags,
co.co_code, tuple(consts), co.co_names,
co.co_varnames, new_filename, co.co_name,
co.co_firstlineno, co.co_lnotab, co.co_freevars,

View file

@ -358,10 +358,36 @@ def shutdown(self, c):
finally:
self.stop_event.set()
def create(self, c, typeid, *args, **kwds):
def create(*args, **kwds):
'''
Create a new shared object and return its id
'''
if len(args) >= 3:
self, c, typeid, *args = args
elif not args:
raise TypeError("descriptor 'create' of 'Server' object "
"needs an argument")
else:
if 'typeid' not in kwds:
raise TypeError('create expected at least 2 positional '
'arguments, got %d' % (len(args)-1))
typeid = kwds.pop('typeid')
if len(args) >= 2:
self, c, *args = args
import warnings
warnings.warn("Passing 'typeid' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
if 'c' not in kwds:
raise TypeError('create expected at least 2 positional '
'arguments, got %d' % (len(args)-1))
c = kwds.pop('c')
self, *args = args
import warnings
warnings.warn("Passing 'c' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
args = tuple(args)
with self.mutex:
callable, exposed, method_to_typeid, proxytype = \
self.registry[typeid]
@ -393,6 +419,7 @@ def create(self, c, typeid, *args, **kwds):
self.incref(c, ident)
return ident, tuple(exposed)
create.__text_signature__ = '($self, c, typeid, /, *args, **kwds)'
def get_methods(self, c, token):
'''
@ -583,10 +610,13 @@ def _run_server(cls, registry, address, authkey, serializer, writer,
util.info('manager serving at %r', server.address)
server.serve_forever()
def _create(self, typeid, *args, **kwds):
def _create(*args, **kwds):
'''
Create a new shared object; return the token and exposed tuple
'''
self, typeid, *args = args
args = tuple(args)
assert self._state.value == State.STARTED, 'server not yet started'
conn = self._Client(self._address, authkey=self._authkey)
try:
@ -1261,15 +1291,26 @@ def __init__(self, *args, **kwargs):
_SharedMemoryTracker(f"shmm_{self.address}_{getpid()}")
util.debug(f"SharedMemoryServer started by pid {getpid()}")
def create(self, c, typeid, *args, **kwargs):
def create(*args, **kwargs):
"""Create a new distributed-shared object (not backed by a shared
memory block) and return its id to be used in a Proxy Object."""
# Unless set up as a shared proxy, don't make shared_memory_context
# a standard part of kwargs. This makes things easier for supplying
# simple functions.
if len(args) >= 3:
typeid = args[2]
elif 'typeid' in kwargs:
typeid = kwargs['typeid']
elif not args:
raise TypeError("descriptor 'create' of 'SharedMemoryServer' "
"object needs an argument")
else:
raise TypeError('create expected at least 2 positional '
'arguments, got %d' % (len(args)-1))
if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
kwargs['shared_memory_context'] = self.shared_memory_context
return Server.create(self, c, typeid, *args, **kwargs)
return Server.create(*args, **kwargs)
create.__text_signature__ = '($self, c, typeid, /, *args, **kwargs)'
def shutdown(self, c):
"Call unlink() on all tracked shared memory, terminate the Server."

View file

@ -59,7 +59,7 @@ def detach(self):
class _ResourceSharer(object):
'''Manager for resouces using background thread.'''
'''Manager for resources using background thread.'''
def __init__(self):
self._key = 0
self._cache = {}

View file

@ -44,20 +44,23 @@ def ensure_running(self):
This can be run from any process. Usually a child process will use
the semaphore created by its parent.'''
with self._lock:
if self._pid is not None:
if self._fd is not None:
# semaphore tracker was launched before, is it still running?
try:
pid, _ = os.waitpid(self._pid, os.WNOHANG)
except ChildProcessError:
# The process terminated
pass
else:
if not pid:
# => still alive
return
if self._check_alive():
# => still alive
return
# => dead, launch it again
os.close(self._fd)
# Clean-up to avoid dangling processes.
try:
# _pid can be None if this process is a child from another
# python process, which has started the semaphore_tracker.
if self._pid is not None:
os.waitpid(self._pid, 0)
except ChildProcessError:
# The semaphore_tracker has already been terminated.
pass
self._fd = None
self._pid = None
@ -99,6 +102,17 @@ def ensure_running(self):
finally:
os.close(r)
def _check_alive(self):
'''Check that the pipe has not been closed by sending a probe.'''
try:
# We cannot use send here as it calls ensure_running, creating
# a cycle.
os.write(self._fd, b'PROBE:0\n')
except OSError:
return False
else:
return True
def register(self, name):
'''Register name of semaphore with semaphore tracker.'''
self._send('REGISTER', name)
@ -150,6 +164,8 @@ def main(fd):
cache.add(name)
elif cmd == b'UNREGISTER':
cache.remove(name)
elif cmd == b'PROBE':
pass
else:
raise RuntimeError('unrecognized command %r' % cmd)
except Exception:
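A toy sketch of why the PROBE command above works as a liveness check (an illustration, not the stdlib code): once the read end of a pipe is closed, writing to the other end raises OSError, so a command the server deliberately ignores doubles as a probe.

import os

r, w = os.pipe()
os.write(w, b'PROBE:0\n')        # reader still open: write succeeds
os.close(r)                      # simulate the tracker process dying
try:
    os.write(w, b'PROBE:0\n')
except OSError:                  # BrokenPipeError on POSIX
    print('tracker is gone')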

View file

@ -46,16 +46,10 @@ def normcase(s):
Makes all characters lowercase and all slashes into backslashes."""
s = os.fspath(s)
try:
if isinstance(s, bytes):
return s.replace(b'/', b'\\').lower()
else:
return s.replace('/', '\\').lower()
except (TypeError, AttributeError):
if not isinstance(s, (bytes, str)):
raise TypeError("normcase() argument must be str or bytes, "
"not %r" % s.__class__.__name__) from None
raise
if isinstance(s, bytes):
return s.replace(b'/', b'\\').lower()
else:
return s.replace('/', '\\').lower()
# Return whether a path is absolute.

View file

@ -1070,3 +1070,40 @@ def __fspath__(self):
@classmethod
def __subclasshook__(cls, subclass):
return hasattr(subclass, '__fspath__')
if name == 'nt':
class _AddedDllDirectory:
def __init__(self, path, cookie, remove_dll_directory):
self.path = path
self._cookie = cookie
self._remove_dll_directory = remove_dll_directory
def close(self):
self._remove_dll_directory(self._cookie)
self.path = None
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def __repr__(self):
if self.path:
return "<AddedDllDirectory({!r})>".format(self.path)
return "<AddedDllDirectory()>"
def add_dll_directory(path):
"""Add a path to the DLL search path.
This search path is used when resolving dependencies for imported
extension modules (the module itself is resolved through sys.path),
and also by ctypes.
Remove the directory by calling close() on the returned object or
using it in a with statement.
"""
import nt
cookie = nt._add_dll_directory(path)
return _AddedDllDirectory(
path,
cookie,
nt._remove_dll_directory
)
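A hedged usage sketch for the new helper; the directory and extension module below are hypothetical:

import os

if os.name == 'nt':
    # Make a vendor DLL directory visible only while importing the
    # extension module that needs it.
    with os.add_dll_directory(r'C:\Program Files\Vendor\bin'):
        import some_vendor_extension  # hypothetical module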

View file

@ -34,7 +34,7 @@
# Internals
#
# EBADF - guard agains macOS `stat` throwing EBADF
# EBADF - guard against macOS `stat` throwing EBADF
_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF)
_IGNORED_WINERRORS = (
@ -411,6 +411,8 @@ def lchmod(self, pathobj, mode):
unlink = os.unlink
link_to = os.link
rmdir = os.rmdir
rename = os.rename
@ -1303,6 +1305,14 @@ def lstat(self):
self._raise_closed()
return self._accessor.lstat(self)
def link_to(self, target):
"""
Create a hard link pointing to a path named target.
"""
if self._closed:
self._raise_closed()
self._accessor.link_to(self, target)
def rename(self, target):
"""
Rename this path to the given path.

View file

@ -491,8 +491,7 @@ def _complete_expression(self, text, line, begidx, endidx):
# Collect globals and locals. It is usually not really sensible to also
# complete builtins, and they clutter the namespace quite heavily, so we
# leave them out.
ns = self.curframe.f_globals.copy()
ns.update(self.curframe_locals)
ns = {**self.curframe.f_globals, **self.curframe_locals}
if '.' in text:
# Walk an attribute chain up to the last part, similar to what
# rlcompleter does. This will bail if any of the parts are not
@ -1377,8 +1376,7 @@ def do_interact(self, arg):
Start an interactive interpreter whose global namespace
contains all the (global and local) names found in the current scope.
"""
ns = self.curframe.f_globals.copy()
ns.update(self.curframe_locals)
ns = {**self.curframe.f_globals, **self.curframe_locals}
code.interact("*interactive*", local=ns)
def do_alias(self, arg):
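The replacement above swaps copy()-plus-update() for a single dict display; both build a new mapping in which local names shadow globals, as this minimal sketch shows:

f_globals = {'x': 1, 'y': 2}
f_locals = {'y': 20, 'z': 30}
ns = {**f_globals, **f_locals}
print(ns)   # {'x': 1, 'y': 20, 'z': 30} -- later keys win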

View file

@ -72,7 +72,7 @@
# type information
# 0.4.0 - added win32_ver() and modified the platform() output for WinXX
# 0.3.4 - fixed a bug in _follow_symlinks()
# 0.3.3 - fixed popen() and "file" command invokation bugs
# 0.3.3 - fixed popen() and "file" command invocation bugs
# 0.3.2 - added architecture() API and support for it in platform()
# 0.3.1 - fixed syscmd_ver() RE to support Windows NT
# 0.3.0 - added system alias support
@ -334,15 +334,32 @@ def _syscmd_ver(system='', release='', version='',
(6, None): "post2012ServerR2",
}
def win32_is_iot():
return win32_edition() in ('IoTUAP', 'NanoServer', 'WindowsCoreHeadless', 'IoTEdgeOS')
def win32_edition():
try:
try:
import winreg
except ImportError:
import _winreg as winreg
except ImportError:
pass
else:
try:
cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
return winreg.QueryValueEx(key, 'EditionId')[0]
except OSError:
pass
return None
def win32_ver(release='', version='', csd='', ptype=''):
try:
from sys import getwindowsversion
except ImportError:
return release, version, csd, ptype
try:
from winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE
except ImportError:
from _winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE
winver = getwindowsversion()
maj, min, build = winver.platform_version or winver[:3]
@ -368,16 +385,20 @@ def win32_ver(release='', version='', csd='', ptype=''):
_WIN32_SERVER_RELEASES.get((maj, None)) or
release)
key = None
try:
key = OpenKeyEx(HKEY_LOCAL_MACHINE,
r'SOFTWARE\Microsoft\Windows NT\CurrentVersion')
ptype = QueryValueEx(key, 'CurrentType')[0]
except:
try:
import winreg
except ImportError:
import _winreg as winreg
except ImportError:
pass
finally:
if key:
CloseKey(key)
else:
try:
cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key:
ptype = QueryValueEx(key, 'CurrentType')[0]
except:
pass
return release, version, csd, ptype
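A quick sketch of the new API surface (safe to run anywhere: off Windows the registry import fails and the functions degrade gracefully):

import platform

print(platform.win32_edition())   # e.g. 'Professional', or None off Windows
print(platform.win32_is_iot())    # True only for IoT/headless editions
print(platform.win32_ver())       # (release, version, csd, ptype)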

View file

@ -18,7 +18,7 @@
extsep = '.'
sep = '/'
pathsep = ':'
defpath = ':/bin:/usr/bin'
defpath = '/bin:/usr/bin'
altsep = None
devnull = '/dev/null'
@ -51,11 +51,7 @@ def _get_sep(path):
def normcase(s):
"""Normalize case of pathname. Has no effect under Posix"""
s = os.fspath(s)
if not isinstance(s, (bytes, str)):
raise TypeError("normcase() argument must be str or bytes, "
"not '{}'".format(s.__class__.__name__))
return s
return os.fspath(s)
# Return whether a path is absolute.

View file

@ -425,13 +425,29 @@ def runctx(self, cmd, globals, locals):
return self
# This method is more useful to profile a single function call.
def runcall(self, func, *args, **kw):
def runcall(*args, **kw):
if len(args) >= 2:
self, func, *args = args
elif not args:
raise TypeError("descriptor 'runcall' of 'Profile' object "
"needs an argument")
elif 'func' in kw:
func = kw.pop('func')
self, *args = args
import warnings
warnings.warn("Passing 'func' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError('runcall expected at least 1 positional argument, '
'got %d' % (len(args)-1))
self.set_cmd(repr(func))
sys.setprofile(self.dispatcher)
try:
return func(*args, **kw)
finally:
sys.setprofile(None)
runcall.__text_signature__ = '($self, func, /, *args, **kw)'
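The pattern above (also used by the manager classes earlier in this commit) predates the `/` syntax in Python source: declaring only `*args` and peeling `self` and `func` off positionally keeps keyword arguments literally named 'self' or 'func' flowing through to the profiled callable, while `__text_signature__` records the intended signature for tools that consult it. A stripped-down sketch with hypothetical names:

class Profiler:
    def runcall(*args, **kw):            # deliberately no named 'self'
        # Peel self and func off positionally so kw may contain keys
        # named 'self' or 'func' destined for the wrapped callable.
        self, func, *args = args
        return func(*args, **kw)
    # Record the intended signature for introspection tools.
    runcall.__text_signature__ = '($self, func, /, *args, **kw)'

print(Profiler().runcall(dict, func=len))   # {'func': <built-in function len>}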
#******************************************************************

View file

@ -509,7 +509,7 @@ def func_std_string(func_name): # match what old profile produced
return "%s:%d(%s)" % func_name
#**************************************************************************
# The following functions combine statists for pairs functions.
# The following functions combine statistics for pairs functions.
# The bulk of the processing involves correctly handling "call" lists,
# such as callers and callees.
#**************************************************************************

View file

@ -997,8 +997,8 @@ def docdata(self, object, name=None, mod=None, cl=None):
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if object.__doc__ is not None:
doc = self.markup(getdoc(object), self.preformat)
doc = self.markup(getdoc(object), self.preformat)
if doc:
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')

View file

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Autogenerated by Sphinx on Mon Feb 25 13:03:43 2019
# Autogenerated by Sphinx on Mon May 6 20:27:55 2019
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
@ -162,20 +162,21 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Note: If the object is a class instance and the attribute '
'reference\n'
' occurs on both sides of the assignment operator, the RHS '
'expression,\n'
' "a.x" can access either an instance attribute or (if no '
'instance\n'
' attribute exists) a class attribute. The LHS target "a.x" '
'is always\n'
' set as an instance attribute, creating it if necessary. '
'Thus, the\n'
' two occurrences of "a.x" do not necessarily refer to the '
'same\n'
' attribute: if the RHS expression refers to a class '
'attribute, the\n'
' LHS creates a new instance attribute as the target of the\n'
' assignment:\n'
' occurs on both sides of the assignment operator, the '
'right-hand side\n'
' expression, "a.x" can access either an instance attribute or '
'(if no\n'
' instance attribute exists) a class attribute. The left-hand '
'side\n'
' target "a.x" is always set as an instance attribute, '
'creating it if\n'
' necessary. Thus, the two occurrences of "a.x" do not '
'necessarily\n'
' refer to the same attribute: if the right-hand side '
'expression\n'
' refers to a class attribute, the left-hand side creates a '
'new\n'
' instance attribute as the target of the assignment:\n'
'\n'
' class Cls:\n'
' x = 3 # class variable\n'
@ -3302,11 +3303,11 @@ topics = {'assert': 'The "assert" statement\n'
'"str.format()"\n'
' method, to produce a “formatted” string representation '
'of an\n'
' object. The "format_spec" argument is a string that '
' object. The *format_spec* argument is a string that '
'contains a\n'
' description of the formatting options desired. The '
'interpretation\n'
' of the "format_spec" argument is up to the type '
' of the *format_spec* argument is up to the type '
'implementing\n'
' "__format__()", however most classes will either '
'delegate\n'
@ -6189,8 +6190,8 @@ topics = {'assert': 'The "assert" statement\n'
'end up importing "pkg.mod". If you execute "from ..subpkg2 import '
'mod"\n'
'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\n'
'specification for relative imports is contained within **PEP '
'328**.\n'
'specification for relative imports is contained in the Package\n'
'Relative Imports section.\n'
'\n'
'"importlib.import_module()" is provided to support applications '
'that\n'
@ -8002,11 +8003,11 @@ topics = {'assert': 'The "assert" statement\n'
'"str.format()"\n'
' method, to produce a “formatted” string representation of '
'an\n'
' object. The "format_spec" argument is a string that '
' object. The *format_spec* argument is a string that '
'contains a\n'
' description of the formatting options desired. The '
'interpretation\n'
' of the "format_spec" argument is up to the type '
' of the *format_spec* argument is up to the type '
'implementing\n'
' "__format__()", however most classes will either '
'delegate\n'
@ -8768,15 +8769,15 @@ topics = {'assert': 'The "assert" statement\n'
'When a class definition is executed, the following steps '
'occur:\n'
'\n'
'* MRO entries are resolved\n'
'* MRO entries are resolved;\n'
'\n'
'* the appropriate metaclass is determined\n'
'* the appropriate metaclass is determined;\n'
'\n'
'* the class namespace is prepared\n'
'* the class namespace is prepared;\n'
'\n'
'* the class body is executed\n'
'* the class body is executed;\n'
'\n'
'* the class object is created\n'
'* the class object is created.\n'
'\n'
'\n'
'Resolving MRO entries\n'
@ -8806,16 +8807,16 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'* if no bases and no explicit metaclass are given, then '
'"type()" is\n'
' used\n'
' used;\n'
'\n'
'* if an explicit metaclass is given and it is *not* an '
'instance of\n'
' "type()", then it is used directly as the metaclass\n'
' "type()", then it is used directly as the metaclass;\n'
'\n'
'* if an instance of "type()" is given as the explicit '
'metaclass, or\n'
' bases are defined, then the most derived metaclass is '
'used\n'
'used.\n'
'\n'
'The most derived metaclass is selected from the explicitly '
'specified\n'
@ -8931,7 +8932,7 @@ topics = {'assert': 'The "assert" statement\n'
'with the\n'
' class being defined and the assigned name of that '
'particular\n'
' descriptor; and\n'
' descriptor;\n'
'\n'
'* finally, the "__init_subclass__()" hook is called on the '
'immediate\n'
@ -9030,7 +9031,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'One can implement the generic class syntax as specified by '
'**PEP 484**\n'
'(for example "List[int]") by defining a special method\n'
'(for example "List[int]") by defining a special method:\n'
'\n'
'classmethod object.__class_getitem__(cls, key)\n'
'\n'
@ -9672,6 +9673,14 @@ topics = {'assert': 'The "assert" statement\n'
'capitalized\n'
' and the rest lowercased.\n'
'\n'
' Changed in version 3.8: The first character is now put '
'into\n'
' titlecase rather than uppercase. This means that '
'characters like\n'
' digraphs will only have their first letter capitalized, '
'instead of\n'
' the full character.\n'
'\n'
'str.casefold()\n'
'\n'
' Return a casefolded copy of the string. Casefolded '
@ -10416,9 +10425,7 @@ topics = {'assert': 'The "assert" statement\n'
' >>> def titlecase(s):\n'
' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n'
' ... lambda mo: '
'mo.group(0)[0].upper() +\n'
' ... '
'mo.group(0)[1:].lower(),\n'
'mo.group(0).capitalize(),\n'
' ... s)\n'
' ...\n'
' >>> titlecase("they\'re bill\'s friends.")\n'
@ -11286,17 +11293,17 @@ topics = {'assert': 'The "assert" statement\n'
'| |\n'
' | | unavailable; not inherited by '
'| |\n'
' | | subclasses '
' | | subclasses. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
' | "__name__" | The function’s name 
' | "__name__" | The function’s name. 
'| Writable |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
' | "__qualname__" | The function’s *qualified name* 
' | "__qualname__" | The function’s *qualified 
'| Writable |\n'
' | | New in version 3.3. '
' | | name*. New in version 3.3. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
@ -11316,7 +11323,7 @@ topics = {'assert': 'The "assert" statement\n'
'| |\n'
' | | or "None" if no arguments have '
'| |\n'
' | | a default value '
' | | a default value. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
@ -11436,15 +11443,6 @@ topics = {'assert': 'The "assert" statement\n'
'is\n'
' the original function object.\n'
'\n'
' When a user-defined method object is created by retrieving\n'
' another method object from a class or instance, the behaviour '
'is\n'
' the same as for a function object, except that the '
'"__func__"\n'
' attribute of the new instance is not the original method '
'object\n'
' but its "__func__" attribute.\n'
'\n'
' When an instance method object is created by retrieving a '
'class\n'
' method object from a class or instance, its "__self__" '
@ -12181,7 +12179,13 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' "fromkeys()" is a class method that returns a new '
'dictionary.\n'
' *value* defaults to "None".\n'
' *value* defaults to "None". All of the values refer '
'to just a\n'
' single instance, so it generally doesn’t make sense '
'for *value*\n'
' to be a mutable object such as an empty list. To get '
'distinct\n'
' values, use a dict comprehension instead.\n'
'\n'
' get(key[, default])\n'
'\n'

View file

@ -42,11 +42,18 @@
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
from _collections_abc import Set as _Set, Sequence as _Sequence
from hashlib import sha512 as _sha512
from itertools import accumulate as _accumulate, repeat as _repeat
from bisect import bisect as _bisect
import os as _os
try:
# hashlib is pretty heavy to load, try lean internal module first
from _sha512 import sha512 as _sha512
except ImportError:
# fallback to official implementation
from hashlib import sha512 as _sha512
__all__ = ["Random","seed","random","uniform","randint","choice","sample",
"randrange","shuffle","normalvariate","lognormvariate",
"expovariate","vonmisesvariate","gammavariate","triangular",

View file

@ -1309,9 +1309,20 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
use_bytes = isinstance(cmd, bytes)
if path is None:
path = os.environ.get("PATH", os.defpath)
path = os.environ.get("PATH", None)
if path is None:
try:
path = os.confstr("CS_PATH")
except (AttributeError, ValueError):
# os.confstr() or CS_PATH is not available
path = os.defpath
# bpo-35755: Don't use os.defpath if the PATH environment variable is
# set to an empty string
# PATH='' doesn't match, whereas PATH=':' looks in the current directory
if not path:
return None
if use_bytes:
path = os.fsencode(path)
path = path.split(os.fsencode(os.pathsep))
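The new lookup order can be read off the hunk above; a condensed sketch of the same rules (an illustration, not shutil's code):

import os

def default_search_path():
    # $PATH if set; else confstr('CS_PATH') where the platform has it;
    # else os.defpath. An empty $PATH means: search nowhere (bpo-35755).
    path = os.environ.get('PATH')
    if path is None:
        try:
            path = os.confstr('CS_PATH')
        except (AttributeError, ValueError):
            path = os.defpath
    return path or None

print(default_search_path())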

View file

@ -60,8 +60,8 @@
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
__all__ = ["fromfd", "getfqdn", "create_connection",
"AddressFamily", "SocketKind"]
__all__ = ["fromfd", "getfqdn", "create_connection", "create_server",
"has_dualstack_ipv6", "AddressFamily", "SocketKind"]
__all__.extend(os._get_exports_list(_socket))
# Set up the socket.AF_* socket.SOCK_* constants as members of IntEnums for
@ -728,6 +728,92 @@ def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
else:
raise error("getaddrinfo returns an empty list")
def has_dualstack_ipv6():
"""Return True if the platform supports creating a SOCK_STREAM socket
which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections.
"""
if not has_ipv6 \
or not hasattr(_socket, 'IPPROTO_IPV6') \
or not hasattr(_socket, 'IPV6_V6ONLY'):
return False
try:
with socket(AF_INET6, SOCK_STREAM) as sock:
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
return True
except error:
return False
def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
dualstack_ipv6=False):
"""Convenience function which creates a SOCK_STREAM type socket
bound to *address* (a 2-tuple (host, port)) and returns the socket
object.
*family* should be either AF_INET or AF_INET6.
*backlog* is the queue size passed to socket.listen().
*reuse_port* dictates whether to use the SO_REUSEPORT socket option.
*dualstack_ipv6*: if true and the platform supports it, it will
create an AF_INET6 socket able to accept both IPv4 and IPv6
connections. When false it will explicitly disable this option on
platforms that enable it by default (e.g. Linux).
>>> with create_server((None, 8000)) as server:
... while True:
... conn, addr = server.accept()
... # handle new connection
"""
if reuse_port and not hasattr(_socket, "SO_REUSEPORT"):
raise ValueError("SO_REUSEPORT not supported on this platform")
if dualstack_ipv6:
if not has_dualstack_ipv6():
raise ValueError("dualstack_ipv6 not supported on this platform")
if family != AF_INET6:
raise ValueError("dualstack_ipv6 requires AF_INET6 family")
sock = socket(family, SOCK_STREAM)
try:
# Note about Windows. We don't set SO_REUSEADDR because:
# 1) It's unnecessary: bind() will succeed even in case of a
# previous closed socket on the same address and still in
# TIME_WAIT state.
# 2) If set, another socket is free to bind() on the same
# address, effectively preventing this one from accepting
# connections. Also, it may set the process in a state where
# it'll no longer respond to any signals or graceful kills.
# See: msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx
if os.name not in ('nt', 'cygwin') and \
hasattr(_socket, 'SO_REUSEADDR'):
try:
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
except error:
# Fail later on bind(), for platforms which may not
# support this option.
pass
if reuse_port:
sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
if has_ipv6 and family == AF_INET6:
if dualstack_ipv6:
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
elif hasattr(_socket, "IPV6_V6ONLY") and \
hasattr(_socket, "IPPROTO_IPV6"):
sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 1)
try:
sock.bind(address)
except error as err:
msg = '%s (while attempting to bind on address %r)' % \
(err.strerror, address)
raise error(err.errno, msg) from None
if backlog is None:
sock.listen()
else:
sock.listen(backlog)
return sock
except error:
sock.close()
raise
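An end-to-end sketch pairing the new helpers (assumes a host where 'localhost' resolves; the thread is only there to keep the example self-contained):

import socket
import threading

dualstack = socket.has_dualstack_ipv6()
family = socket.AF_INET6 if dualstack else socket.AF_INET
with socket.create_server(('', 0), family=family,
                          dualstack_ipv6=dualstack) as srv:
    port = srv.getsockname()[1]   # port 0 asked the OS for a free port

    def client():
        with socket.create_connection(('localhost', port)) as c:
            c.sendall(b'ping')

    threading.Thread(target=client).start()
    conn, _ = srv.accept()
    with conn:
        print(conn.recv(4))       # b'ping'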
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
"""Resolve host and port into list of address info entries.

View file

@ -59,11 +59,9 @@ def __new__(cls, value, name):
self.name = name
return self
def __str__(self):
def __repr__(self):
return self.name
__repr__ = __str__
MAXREPEAT = _NamedIntConstant(MAXREPEAT, 'MAXREPEAT')
def _makecodes(names):

View file

@ -7,18 +7,21 @@
Calculating averages
--------------------
================== =============================================
================== ==================================================
Function Description
================== =============================================
================== ==================================================
mean Arithmetic mean (average) of data.
fmean Fast, floating point arithmetic mean.
geometric_mean Geometric mean of data.
harmonic_mean Harmonic mean of data.
median Median (middle value) of data.
median_low Low median of data.
median_high High median of data.
median_grouped Median, or 50th percentile, of grouped data.
mode Mode (most common value) of data.
multimode List of modes (most common values of data)
================== =============================================
multimode List of modes (most common values of data).
quantiles Divide data into intervals with equal probability.
================== ==================================================
Calculate the arithmetic mean ("the average") of data:
@ -77,10 +80,11 @@
"""
__all__ = [ 'StatisticsError', 'NormalDist',
__all__ = [ 'StatisticsError', 'NormalDist', 'quantiles',
'pstdev', 'pvariance', 'stdev', 'variance',
'median', 'median_low', 'median_high', 'median_grouped',
'mean', 'mode', 'multimode', 'harmonic_mean', 'fmean',
'geometric_mean',
]
import math
@ -328,6 +332,24 @@ def count(x):
except ZeroDivisionError:
raise StatisticsError('fmean requires at least one data point') from None
def geometric_mean(data):
"""Convert data to floats and compute the geometric mean.
Raises a StatisticsError if the input dataset is empty,
if it contains a zero, or if it contains a negative value.
No special efforts are made to achieve exact results.
(However, this may change in the future.)
>>> round(geometric_mean([54, 24, 36]), 9)
36.0
"""
try:
return exp(fmean(map(log, data)))
except ValueError:
raise StatisticsError('geometric mean requires a non-empty dataset '
'containing positive numbers') from None
def harmonic_mean(data):
"""Return the harmonic mean of data.
@ -542,6 +564,54 @@ def multimode(data):
maxcount, mode_items = next(groupby(counts, key=itemgetter(1)), (0, []))
return list(map(itemgetter(0), mode_items))
def quantiles(dist, *, n=4, method='exclusive'):
'''Divide *dist* into *n* continuous intervals with equal probability.
Returns a list of (n - 1) cut points separating the intervals.
Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
Set *n* to 100 for percentiles which gives the 99 cut points that
separate *dist* into 100 equal sized groups.
The *dist* can be any iterable containing sample data or it can be
an instance of a class that defines an inv_cdf() method. For sample
data, the cut points are linearly interpolated between data points.
If *method* is set to *inclusive*, *dist* is treated as population
data. The minimum value is treated as the 0th percentile and the
maximum value is treated as the 100th percentile.
'''
# Possible future API extensions:
# quantiles(data, already_sorted=True)
# quantiles(data, cut_points=[0.02, 0.25, 0.50, 0.75, 0.98])
if n < 1:
raise StatisticsError('n must be at least 1')
if hasattr(dist, 'inv_cdf'):
return [dist.inv_cdf(i / n) for i in range(1, n)]
data = sorted(dist)
ld = len(data)
if ld < 2:
raise StatisticsError('must have at least two data points')
if method == 'inclusive':
m = ld - 1
result = []
for i in range(1, n):
j = i * m // n
delta = i*m - j*n
interpolated = (data[j] * (n - delta) + data[j+1] * delta) / n
result.append(interpolated)
return result
if method == 'exclusive':
m = ld + 1
result = []
for i in range(1, n):
j = i * m // n # rescale i to m/n
j = 1 if j < 1 else ld-1 if j > ld-1 else j # clamp to 1 .. ld-1
delta = i*m - j*n # exact integer math
interpolated = (data[j-1] * (n - delta) + data[j] * delta) / n
result.append(interpolated)
return result
raise ValueError(f'Unknown method: {method!r}')
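Working the integer arithmetic above through a four-point dataset shows how the two methods differ (values follow from the formulas in the hunk):

from statistics import quantiles

data = [1, 2, 3, 4]
# 'exclusive' (default) treats data as a sample: cut points may
# extrapolate beyond a small dataset's spacing.
print(quantiles(data))                       # [1.25, 2.5, 3.75]
# 'inclusive' treats min and max as the 0th and 100th percentiles.
print(quantiles(data, method='inclusive'))   # [1.75, 2.5, 3.25]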
# === Measures of spread ===
@ -709,7 +779,8 @@ class NormalDist:
# https://en.wikipedia.org/wiki/Normal_distribution
# https://en.wikipedia.org/wiki/Variance#Properties
__slots__ = ('mu', 'sigma')
__slots__ = {'mu': 'Arithmetic mean of a normal distribution',
'sigma': 'Standard deviation of a normal distribution'}
def __init__(self, mu=0.0, sigma=1.0):
'NormalDist where mu is the mean and sigma is the standard deviation.'
@ -726,7 +797,7 @@ def from_samples(cls, data):
xbar = fmean(data)
return cls(xbar, stdev(data, xbar))
def samples(self, n, seed=None):
def samples(self, n, *, seed=None):
'Generate *n* samples for a given mean and standard deviation.'
gauss = random.gauss if seed is None else random.Random(seed).gauss
mu, sigma = self.mu, self.sigma

View file

@ -203,7 +203,6 @@ def __repr__(self):
return "%s(%d)" % (self.__class__.__name__, int(self))
__del__ = Close
__str__ = __repr__
else:
# When select or poll has indicated that the file is writable,
# we can write up to _PIPE_BUF bytes without risk of blocking.

View file

@ -626,6 +626,8 @@ def get_platform():
if os.name == 'nt':
if 'amd64' in sys.version.lower():
return 'win-amd64'
if '(arm)' in sys.version.lower():
return 'win-arm32'
return sys.platform
if os.name != "posix" or not hasattr(os, 'uname'):

View file

@ -717,11 +717,32 @@ class TarInfo(object):
usually created internally.
"""
__slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
"chksum", "type", "linkname", "uname", "gname",
"devmajor", "devminor",
"offset", "offset_data", "pax_headers", "sparse",
"tarfile", "_sparse_structs", "_link_target")
__slots__ = dict(
name = 'Name of the archive member.',
mode = 'Permission bits.',
uid = 'User ID of the user who originally stored this member.',
gid = 'Group ID of the user who originally stored this member.',
size = 'Size in bytes.',
mtime = 'Time of last modification.',
chksum = 'Header checksum.',
type = ('File type. type is usually one of these constants: '
'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
linkname = ('Name of the target file name, which is only present '
'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
uname = 'User name.',
gname = 'Group name.',
devmajor = 'Device major number.',
devminor = 'Device minor number.',
offset = 'The tar header starts here.',
offset_data = "The file's data starts here.",
pax_headers = ('A dictionary containing key-value pairs of an '
'associated pax extended header.'),
sparse = 'Sparse member information.',
tarfile = None,
_sparse_structs = None,
_link_target = None,
)
def __init__(self, name=""):
"""Construct a TarInfo object. name is the optional name
@ -747,10 +768,9 @@ def __init__(self, name=""):
self.sparse = None # sparse member information
self.pax_headers = {} # pax header information
# In pax headers the "name" and "linkname" field are called
# "path" and "linkpath".
@property
def path(self):
'In pax headers, "name" is called "path".'
return self.name
@path.setter
@ -759,6 +779,7 @@ def path(self, name):
@property
def linkpath(self):
'In pax headers, "linkname" is called "linkpath".'
return self.linkname
@linkpath.setter
@ -1350,24 +1371,42 @@ def _block(self, count):
return blocks * BLOCKSIZE
def isreg(self):
'Return True if the Tarinfo object is a regular file.'
return self.type in REGULAR_TYPES
def isfile(self):
'Return True if the Tarinfo object is a regular file.'
return self.isreg()
def isdir(self):
'Return True if it is a directory.'
return self.type == DIRTYPE
def issym(self):
'Return True if it is a symbolic link.'
return self.type == SYMTYPE
def islnk(self):
'Return True if it is a hard link.'
return self.type == LNKTYPE
def ischr(self):
'Return True if it is a character device.'
return self.type == CHRTYPE
def isblk(self):
'Return True if it is a block device.'
return self.type == BLKTYPE
def isfifo(self):
'Return True if it is a FIFO.'
return self.type == FIFOTYPE
def issparse(self):
return self.sparse is not None
def isdev(self):
'Return True if it is one of character device, block device or FIFO.'
return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
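The change from a tuple to a dict-valued __slots__ relies on a 3.8 behaviour: the dict values become the docstrings of the generated member descriptors. A minimal sketch:

class Point:
    __slots__ = {
        'x': 'Horizontal coordinate.',
        'y': 'Vertical coordinate.',
    }

print(Point.x.__doc__)   # Horizontal coordinate.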

View file

@ -3334,9 +3334,7 @@ def _listener(cls, conn, families):
new_conn.close()
l.close()
l = socket.socket()
l.bind((test.support.HOST, 0))
l.listen()
l = socket.create_server((test.support.HOST, 0))
conn.send(l.getsockname())
new_conn, addr = l.accept()
conn.send(new_conn)
@ -4345,9 +4343,7 @@ def _child_test_wait_socket(cls, address, slow):
def test_wait_socket(self, slow=False):
from multiprocessing.connection import wait
l = socket.socket()
l.bind((test.support.HOST, 0))
l.listen()
l = socket.create_server((test.support.HOST, 0))
addr = l.getsockname()
readers = []
procs = []
@ -4895,6 +4891,34 @@ def test_semaphore_tracker_sigkill(self):
# Uncatchable signal.
self.check_semaphore_tracker_death(signal.SIGKILL, True)
@staticmethod
def _is_semaphore_tracker_reused(conn, pid):
from multiprocessing.semaphore_tracker import _semaphore_tracker
_semaphore_tracker.ensure_running()
# The pid should be None in the child process, except for the fork
# context. It should not be a new value.
reused = _semaphore_tracker._pid in (None, pid)
reused &= _semaphore_tracker._check_alive()
conn.send(reused)
def test_semaphore_tracker_reused(self):
from multiprocessing.semaphore_tracker import _semaphore_tracker
_semaphore_tracker.ensure_running()
pid = _semaphore_tracker._pid
r, w = multiprocessing.Pipe(duplex=False)
p = multiprocessing.Process(target=self._is_semaphore_tracker_reused,
args=(w, pid))
p.start()
is_semaphore_tracker_reused = r.recv()
# Clean up
p.join()
w.close()
r.close()
self.assertTrue(is_semaphore_tracker_reused)
class TestSimpleQueue(unittest.TestCase):

View file

@ -4,17 +4,17 @@
Find the test_os test method which alters the environment:
./python -m test.bisect --fail-env-changed test_os
./python -m test.bisect_cmd --fail-env-changed test_os
Find a reference leak in "test_os", write the list of failing tests into the
"bisect" file:
./python -m test.bisect -o bisect -R 3:3 test_os
./python -m test.bisect_cmd -o bisect -R 3:3 test_os
Load an existing list of tests from a file using -i option:
./python -m test --list-cases -m FileTests test_os > tests
./python -m test.bisect -i tests test_os
./python -m test.bisect_cmd -i tests test_os
"""
import argparse

View file

@ -1795,6 +1795,82 @@ def test_fromisoformat_fails_typeerror(self):
with self.assertRaises(TypeError):
self.theclass.fromisoformat(bad_type)
def test_fromisocalendar(self):
# For each test case, assert that fromisocalendar is the
# inverse of the isocalendar function
dates = [
(2016, 4, 3),
(2005, 1, 2), # (2004, 53, 7)
(2008, 12, 30), # (2009, 1, 2)
(2010, 1, 2), # (2009, 53, 6)
(2009, 12, 31), # (2009, 53, 4)
(1900, 1, 1), # Unusual non-leap year (year % 100 == 0)
(1900, 12, 31),
(2000, 1, 1), # Unusual leap year (year % 400 == 0)
(2000, 12, 31),
(2004, 1, 1), # Leap year
(2004, 12, 31),
(1, 1, 1),
(9999, 12, 31),
(MINYEAR, 1, 1),
(MAXYEAR, 12, 31),
]
for datecomps in dates:
with self.subTest(datecomps=datecomps):
dobj = self.theclass(*datecomps)
isocal = dobj.isocalendar()
d_roundtrip = self.theclass.fromisocalendar(*isocal)
self.assertEqual(dobj, d_roundtrip)
def test_fromisocalendar_value_errors(self):
isocals = [
(2019, 0, 1),
(2019, -1, 1),
(2019, 54, 1),
(2019, 1, 0),
(2019, 1, -1),
(2019, 1, 8),
(2019, 53, 1),
(10000, 1, 1),
(0, 1, 1),
(9999999, 1, 1),
(2<<32, 1, 1),
(2019, 2<<32, 1),
(2019, 1, 2<<32),
]
for isocal in isocals:
with self.subTest(isocal=isocal):
with self.assertRaises(ValueError):
self.theclass.fromisocalendar(*isocal)
def test_fromisocalendar_type_errors(self):
err_txformers = [
str,
float,
lambda x: None,
]
# Take a valid base tuple and transform it to contain one argument
# with the wrong type. Repeat this for each argument, e.g.
# [("2019", 1, 1), (2019, "1", 1), (2019, 1, "1"), ...]
isocals = []
base = (2019, 1, 1)
for i in range(3):
for txformer in err_txformers:
err_val = list(base)
err_val[i] = txformer(err_val[i])
isocals.append(tuple(err_val))
for isocal in isocals:
with self.subTest(isocal=isocal):
with self.assertRaises(TypeError):
self.theclass.fromisocalendar(*isocal)
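The round-trip property these tests assert can be seen on one of the tricky dates, where the ISO year differs from the calendar year:

from datetime import date

d = date(2010, 1, 2)
print(d.isocalendar())                    # (2009, 53, 6): ISO year is 2009
print(date.fromisocalendar(2009, 53, 6))  # 2010-01-02, round-trips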
#############################################################################
# datetime tests
@ -3407,7 +3483,7 @@ def utcoffset(self, t):
self.assertEqual(got, expected)
# However, if they're different members, utcoffset is not ignored.
# Note that a time can't actually have an operand-depedent offset,
# Note that a time can't actually have an operand-dependent offset,
# though (and time.utcoffset() passes None to tzinfo.utcoffset()),
# so skip this test for time.
if cls is not time:
@ -5942,6 +6018,41 @@ class TZInfoSubclass(tzinfo):
with self.subTest(arg=arg, exact=exact):
self.assertFalse(is_tzinfo(arg, exact))
def test_date_from_timestamp(self):
ts = datetime(1995, 4, 12).timestamp()
for macro in [0, 1]:
with self.subTest(macro=macro):
d = _testcapi.get_date_fromtimestamp(int(ts), macro)
self.assertEqual(d, date(1995, 4, 12))
def test_datetime_from_timestamp(self):
ts0 = datetime(1995, 4, 12).timestamp()
ts1 = datetime(1995, 4, 12, 12, 30).timestamp()
cases = [
((1995, 4, 12), None, False),
((1995, 4, 12), None, True),
((1995, 4, 12), timezone(timedelta(hours=1)), True),
((1995, 4, 12, 14, 30), None, False),
((1995, 4, 12, 14, 30), None, True),
((1995, 4, 12, 14, 30), timezone(timedelta(hours=1)), True),
]
from_timestamp = _testcapi.get_datetime_fromtimestamp
for case in cases:
for macro in [0, 1]:
with self.subTest(case=case, macro=macro):
dtup, tzinfo, usetz = case
dt_orig = datetime(*dtup, tzinfo=tzinfo)
ts = int(dt_orig.timestamp())
dt_rt = from_timestamp(ts, tzinfo, usetz, macro)
self.assertEqual(dt_orig, dt_rt)
def load_tests(loader, standard_tests, pattern):
standard_tests.addTest(ZoneInfoCompleteTest())
return standard_tests

View file

@ -285,12 +285,9 @@ def test_sendmsg(self):
self._test_send(lambda sock, data: sock.sendmsg([data]))
def test_accept(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock = socket.create_server((support.HOST, 0))
self.addCleanup(sock.close)
sock.bind((support.HOST, 0))
port = sock.getsockname()[1]
sock.listen()
code = '\n'.join((
'import socket, time',

View file

@ -5,7 +5,7 @@
# line 5
# line 7
def spam(a, b, c, d=3, e=4, f=5, *g, **h):
def spam(a, /, b, c, d=3, e=4, f=5, *g, **h):
eggs(b + d, c + f)
# line 11
@ -80,3 +80,14 @@ async def lobbest(grenade):
raise Exception()
except:
tb = sys.exc_info()[2]
class Callable:
def __call__(self, *args):
return args
def as_method_of(self, obj):
from types import MethodType
return MethodType(self, obj)
custom_method = Callable().as_method_of(42)
del Callable

View file

@ -137,3 +137,19 @@ def func136():
def func137():
never_reached1
never_reached2
#line 141
def positional_only_arg(a, /):
pass
#line 145
def all_markers(a, b, /, c, d, *, e, f):
pass
# line 149
def all_markers_with_args_and_kwargs(a, b, /, c, d, *args, e, f, **kwargs):
pass
#line 153
def all_markers_with_defaults(a, b=1, /, c=2, d=3, *, e=4, f=5):
pass

View file

@ -226,8 +226,9 @@ def _create_parser():
'(instead of the Python stdlib test suite)')
group = parser.add_argument_group('Special runs')
group.add_argument('-l', '--findleaks', action='store_true',
help='if GC is available detect tests that leak memory')
group.add_argument('-l', '--findleaks', action='store_const', const=2,
default=1,
help='deprecated alias to --fail-env-changed')
group.add_argument('-L', '--runleaks', action='store_true',
help='run the leaks(1) command just before exit.' +
more_details)
@ -309,7 +310,7 @@ def _parse_args(args, **kwargs):
# Defaults
ns = argparse.Namespace(testdir=None, verbose=0, quiet=False,
exclude=False, single=False, randomize=False, fromfile=None,
findleaks=False, use_resources=None, trace=False, coverdir='coverage',
findleaks=1, use_resources=None, trace=False, coverdir='coverage',
runleaks=False, huntrleaks=False, verbose2=False, print_slow=False,
random_seed=None, use_mp=None, verbose3=False, forever=False,
header=False, failfast=False, match_tests=None, pgo=False)
@ -330,12 +331,13 @@ def _parse_args(args, **kwargs):
parser.error("unrecognized arguments: %s" % arg)
sys.exit(1)
if ns.findleaks > 1:
# --findleaks implies --fail-env-changed
ns.fail_env_changed = True
if ns.single and ns.fromfile:
parser.error("-s and -f don't go together!")
if ns.use_mp is not None and ns.trace:
parser.error("-T and -j don't go together!")
if ns.use_mp is not None and ns.findleaks:
parser.error("-l and -j don't go together!")
if ns.failfast and not (ns.verbose or ns.verbose3):
parser.error("-G/--failfast needs either -v or -W")
if ns.pgo and (ns.verbose or ns.verbose2 or ns.verbose3):

View file

@ -20,10 +20,6 @@
from test.libregrtest.setup import setup_tests
from test.libregrtest.utils import removepy, count, format_duration, printlist
from test import support
try:
import gc
except ImportError:
gc = None
# When tests are run from the Python build directory, it is best practice
@ -79,8 +75,8 @@ def __init__(self):
self.skipped = []
self.resource_denieds = []
self.environment_changed = []
self.rerun = []
self.run_no_tests = []
self.rerun = []
self.first_result = None
self.interrupted = False
@ -90,9 +86,6 @@ def __init__(self):
# used by --coverage, trace.Trace instance
self.tracer = None
# used by --findleaks, store for gc.garbage
self.found_garbage = []
# used to display the progress bar "[ 3/100]"
self.start_time = time.monotonic()
self.test_count = ''
@ -105,26 +98,43 @@ def __init__(self):
# used by --junit-xml
self.testsuite_xml = None
def accumulate_result(self, test, result):
ok, test_time, xml_data = result
if ok not in (CHILD_ERROR, INTERRUPTED):
self.test_times.append((test_time, test))
self.win_load_tracker = None
def get_executed(self):
return (set(self.good) | set(self.bad) | set(self.skipped)
| set(self.resource_denieds) | set(self.environment_changed)
| set(self.run_no_tests))
def accumulate_result(self, result, rerun=False):
test_name = result.test_name
ok = result.result
if ok not in (CHILD_ERROR, INTERRUPTED) and not rerun:
self.test_times.append((result.test_time, test_name))
if ok == PASSED:
self.good.append(test)
self.good.append(test_name)
elif ok in (FAILED, CHILD_ERROR):
self.bad.append(test)
if not rerun:
self.bad.append(test_name)
elif ok == ENV_CHANGED:
self.environment_changed.append(test)
self.environment_changed.append(test_name)
elif ok == SKIPPED:
self.skipped.append(test)
self.skipped.append(test_name)
elif ok == RESOURCE_DENIED:
self.skipped.append(test)
self.resource_denieds.append(test)
self.skipped.append(test_name)
self.resource_denieds.append(test_name)
elif ok == TEST_DID_NOT_RUN:
self.run_no_tests.append(test)
elif ok != INTERRUPTED:
self.run_no_tests.append(test_name)
elif ok == INTERRUPTED:
self.interrupted = True
else:
raise ValueError("invalid test result: %r" % ok)
if rerun and ok not in {FAILED, CHILD_ERROR, INTERRUPTED}:
self.bad.remove(test_name)
xml_data = result.xml_data
if xml_data:
import xml.etree.ElementTree as ET
for e in xml_data:
@ -134,7 +144,7 @@ def accumulate_result(self, test, result):
print(xml_data, file=sys.__stderr__)
raise
def display_progress(self, test_index, test):
def display_progress(self, test_index, text):
if self.ns.quiet:
return
@ -143,12 +153,12 @@ def display_progress(self, test_index, test):
fails = len(self.bad) + len(self.environment_changed)
if fails and not self.ns.pgo:
line = f"{line}/{fails}"
line = f"[{line}] {test}"
line = f"[{line}] {text}"
# add the system load prefix: "load avg: 1.80 "
if hasattr(os, 'getloadavg'):
load_avg_1min = os.getloadavg()[0]
line = f"load avg: {load_avg_1min:.2f} {line}"
load_avg = self.getloadavg()
if load_avg is not None:
line = f"load avg: {load_avg:.2f} {line}"
# add the timestamp prefix: "0:01:05 "
test_time = time.monotonic() - self.start_time
@ -164,22 +174,6 @@ def parse_args(self, kwargs):
"faulthandler.dump_traceback_later", file=sys.stderr)
ns.timeout = None
if ns.threshold is not None and gc is None:
print('No GC available, ignore --threshold.', file=sys.stderr)
ns.threshold = None
if ns.findleaks:
if gc is not None:
# Uncomment the line below to report garbage that is not
# freeable by reference counting alone. By default only
# garbage that is not collectable by the GC is reported.
pass
#gc.set_debug(gc.DEBUG_SAVEALL)
else:
print('No GC available, disabling --findleaks',
file=sys.stderr)
ns.findleaks = False
if ns.xmlpath:
support.junit_xml_list = self.testsuite_xml = []
@ -275,13 +269,13 @@ def list_cases(self):
support.verbose = False
support.set_match_tests(self.ns.match_tests)
for test in self.selected:
abstest = get_abs_module(self.ns, test)
for test_name in self.selected:
abstest = get_abs_module(self.ns, test_name)
try:
suite = unittest.defaultTestLoader.loadTestsFromName(abstest)
self._list_cases(suite)
except unittest.SkipTest:
self.skipped.append(test)
self.skipped.append(test_name)
if self.skipped:
print(file=sys.stderr)
@ -298,23 +292,19 @@ def rerun_failed_tests(self):
print()
print("Re-running failed tests in verbose mode")
self.rerun = self.bad[:]
for test in self.rerun:
print("Re-running test %r in verbose mode" % test, flush=True)
try:
self.ns.verbose = True
ok = runtest(self.ns, test)
except KeyboardInterrupt:
self.interrupted = True
# print a newline separate from the ^C
print()
for test_name in self.rerun:
print(f"Re-running {test_name} in verbose mode", flush=True)
self.ns.verbose = True
result = runtest(self.ns, test_name)
self.accumulate_result(result, rerun=True)
if result.result == INTERRUPTED:
break
else:
if ok[0] in {PASSED, ENV_CHANGED, SKIPPED, RESOURCE_DENIED}:
self.bad.remove(test)
else:
if self.bad:
print(count(len(self.bad), 'test'), "failed again:")
printlist(self.bad)
if self.bad:
print(count(len(self.bad), 'test'), "failed again:")
printlist(self.bad)
self.display_result()
@ -327,11 +317,11 @@ def display_result(self):
print("== Tests result: %s ==" % self.get_tests_result())
if self.interrupted:
print()
# print a newline after ^C
print("Test suite interrupted by signal SIGINT.")
executed = set(self.good) | set(self.bad) | set(self.skipped)
omitted = set(self.selected) - executed
omitted = set(self.selected) - self.get_executed()
if omitted:
print()
print(count(len(omitted), "test"), "omitted:")
printlist(omitted)
@ -348,8 +338,8 @@ def display_result(self):
self.test_times.sort(reverse=True)
print()
print("10 slowest tests:")
for time, test in self.test_times[:10]:
print("- %s: %s" % (test, format_duration(time)))
for test_time, test in self.test_times[:10]:
print("- %s: %s" % (test, format_duration(test_time)))
if self.bad:
print()
@ -387,10 +377,10 @@ def run_tests_sequential(self):
print("Run tests sequentially")
previous_test = None
for test_index, test in enumerate(self.tests, 1):
for test_index, test_name in enumerate(self.tests, 1):
start_time = time.monotonic()
text = test
text = test_name
if previous_test:
text = '%s -- %s' % (text, previous_test)
self.display_progress(test_index, text)
@ -398,22 +388,19 @@ def run_tests_sequential(self):
if self.tracer:
# If we're tracing code coverage, then we don't exit with status
# if on a false return value from main.
cmd = ('result = runtest(self.ns, test); '
'self.accumulate_result(test, result)')
cmd = ('result = runtest(self.ns, test_name); '
'self.accumulate_result(result)')
ns = dict(locals())
self.tracer.runctx(cmd, globals=globals(), locals=ns)
result = ns['result']
else:
try:
result = runtest(self.ns, test)
except KeyboardInterrupt:
self.interrupted = True
self.accumulate_result(test, (INTERRUPTED, None, None))
break
else:
self.accumulate_result(test, result)
result = runtest(self.ns, test_name)
self.accumulate_result(result)
previous_test = format_test_result(test, result[0])
if result.result == INTERRUPTED:
break
previous_test = format_test_result(result)
test_time = time.monotonic() - start_time
if test_time >= PROGRESS_MIN_TIME:
previous_test = "%s in %s" % (previous_test, format_duration(test_time))
@ -421,16 +408,6 @@ def run_tests_sequential(self):
# be quiet: say nothing if the test passed shortly
previous_test = None
if self.ns.findleaks:
gc.collect()
if gc.garbage:
print("Warning: test created", len(gc.garbage), end=' ')
print("uncollectable object(s).")
# move the uncollectable objects somewhere so we don't see
# them again
self.found_garbage.extend(gc.garbage)
del gc.garbage[:]
# Unload the newly imported modules (best effort finalization)
for module in sys.modules.keys():
if module not in save_modules and module.startswith("test."):
@ -441,8 +418,8 @@ def run_tests_sequential(self):
def _test_forever(self, tests):
while True:
for test in tests:
yield test
for test_name in tests:
yield test_name
if self.bad:
return
if self.ns.fail_env_changed and self.environment_changed:
@ -515,6 +492,10 @@ def run_tests(self):
self.run_tests_sequential()
def finalize(self):
if self.win_load_tracker is not None:
self.win_load_tracker.close()
self.win_load_tracker = None
if self.next_single_filename:
if self.next_single_test:
with open(self.next_single_filename, 'w') as fp:
@ -585,6 +566,15 @@ def main(self, tests=None, **kwargs):
with support.temp_cwd(test_cwd, quiet=True):
self._main(tests, kwargs)
def getloadavg(self):
if self.win_load_tracker is not None:
return self.win_load_tracker.getloadavg()
if hasattr(os, 'getloadavg'):
return os.getloadavg()[0]
return None
def _main(self, tests, kwargs):
if self.ns.huntrleaks:
warmup, repetitions, _ = self.ns.huntrleaks
@ -616,6 +606,18 @@ def _main(self, tests, kwargs):
self.list_cases()
sys.exit(0)
# If we're on windows and this is the parent runner (not a worker),
# track the load average.
if sys.platform == 'win32' and (self.ns.worker_args is None):
from test.libregrtest.win_utils import WindowsLoadTracker
try:
self.win_load_tracker = WindowsLoadTracker()
except FileNotFoundError as error:
# Windows IoT Core and Windows Nano Server do not provide
# typeperf.exe for x64, x86 or ARM
print(f'Failed to create WindowsLoadTracker: {error}')
self.run_tests()
self.display_result()

View file

@ -1,4 +1,3 @@
import errno
import os
import re
import sys
@ -8,13 +7,17 @@
try:
from _abc import _get_dump
except ImportError:
import weakref
def _get_dump(cls):
# For legacy Python version
return (cls._abc_registry, cls._abc_cache,
# Reimplement _get_dump() for pure-Python implementation of
# the abc module (Lib/_py_abc.py)
registry_weakrefs = set(weakref.ref(obj) for obj in cls._abc_registry)
return (registry_weakrefs, cls._abc_cache,
cls._abc_negative_cache, cls._abc_negative_cache_version)
def dash_R(the_module, test, indirect_test, huntrleaks):
def dash_R(ns, test_name, test_func):
"""Run a test multiple times, looking for reference leaks.
Returns:
@ -28,6 +31,10 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
raise Exception("Tracking reference leaks requires a debug build "
"of Python")
# Avoid false positives due to various caches
# filling slowly with random data:
warm_caches()
# Save current values for dash_R_cleanup() to restore.
fs = warnings.filters[:]
ps = copyreg.dispatch_table.copy()
@ -53,31 +60,52 @@ def dash_R(the_module, test, indirect_test, huntrleaks):
def get_pooled_int(value):
return int_pool.setdefault(value, value)
nwarmup, ntracked, fname = huntrleaks
nwarmup, ntracked, fname = ns.huntrleaks
fname = os.path.join(support.SAVEDCWD, fname)
repcount = nwarmup + ntracked
# Pre-allocate to ensure that the loop doesn't allocate anything new
rep_range = list(range(repcount))
rc_deltas = [0] * repcount
alloc_deltas = [0] * repcount
fd_deltas = [0] * repcount
getallocatedblocks = sys.getallocatedblocks
gettotalrefcount = sys.gettotalrefcount
fd_count = support.fd_count
print("beginning", repcount, "repetitions", file=sys.stderr)
print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr,
flush=True)
# initialize variables to make pyflakes quiet
rc_before = alloc_before = fd_before = 0
for i in range(repcount):
indirect_test()
alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc,
abcs)
print('.', end='', file=sys.stderr, flush=True)
if i >= nwarmup:
rc_deltas[i] = get_pooled_int(rc_after - rc_before)
alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
fd_deltas[i] = get_pooled_int(fd_after - fd_before)
if not ns.quiet:
print("beginning", repcount, "repetitions", file=sys.stderr)
print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr,
flush=True)
dash_R_cleanup(fs, ps, pic, zdc, abcs)
for i in rep_range:
test_func()
dash_R_cleanup(fs, ps, pic, zdc, abcs)
# dash_R_cleanup() ends with collecting cyclic trash:
# read memory statistics immediately after.
alloc_after = getallocatedblocks()
rc_after = gettotalrefcount()
fd_after = fd_count()
if not ns.quiet:
print('.', end='', file=sys.stderr, flush=True)
rc_deltas[i] = get_pooled_int(rc_after - rc_before)
alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
fd_deltas[i] = get_pooled_int(fd_after - fd_before)
alloc_before = alloc_after
rc_before = rc_after
fd_before = fd_after
print(file=sys.stderr)
if not ns.quiet:
print(file=sys.stderr)
# These checkers return False on success, True on failure
def check_rc_deltas(deltas):
@ -108,7 +136,7 @@ def check_fd_deltas(deltas):
deltas = deltas[nwarmup:]
if checker(deltas):
msg = '%s leaked %s %s, sum=%s' % (
test, deltas, item_name, sum(deltas))
test_name, deltas, item_name, sum(deltas))
print(msg, file=sys.stderr, flush=True)
with open(fname, "a") as refrep:
print(msg, file=refrep)
@ -118,7 +146,7 @@ def check_fd_deltas(deltas):
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
import gc, copyreg
import copyreg
import collections.abc
# Restore some original values.
@ -150,16 +178,8 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs):
clear_caches()
# Collect cyclic trash and read memory statistics immediately after.
func1 = sys.getallocatedblocks
func2 = sys.gettotalrefcount
gc.collect()
return func1(), func2(), support.fd_count()
def clear_caches():
import gc
# Clear the warnings registry, so they can be displayed again
for mod in sys.modules.values():
if hasattr(mod, '__warningregistry__'):
@ -252,7 +272,7 @@ def clear_caches():
for f in typing._cleanups:
f()
gc.collect()
support.gc_collect()
def warm_caches():
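Stripped of regrtest plumbing, the leak check boils down to sampling a counter around repeated runs and keeping only the post-warm-up deltas; a toy sketch (sys.gettotalrefcount exists only on debug builds, hence the fallback):

import sys

def leak_deltas(test_func, nwarmup=2, ntracked=3):
    # Fall back to allocated-block counts so the sketch runs on
    # release builds too.
    counter = getattr(sys, 'gettotalrefcount', sys.getallocatedblocks)
    deltas = []
    before = counter()
    for i in range(nwarmup + ntracked):
        test_func()
        after = counter()
        if i >= nwarmup:
            deltas.append(after - before)
        before = after
    return deltas   # consistently positive deltas suggest a leak

print(leak_deltas(lambda: list(range(100))))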

View file

@ -1,4 +1,7 @@
import collections
import faulthandler
import functools
import gc
import importlib
import io
import os
@ -6,9 +9,11 @@
import time
import traceback
import unittest
from test import support
from test.libregrtest.refleak import dash_R, clear_caches
from test.libregrtest.save_env import saved_test_environment
from test.libregrtest.utils import print_warning
# Test result constants.
@ -55,9 +60,17 @@
NOTTESTS = set()
def format_test_result(test_name, result):
fmt = _FORMAT_TEST_RESULT.get(result, "%s")
return fmt % test_name
# used by --findleaks, store for gc.garbage
FOUND_GARBAGE = []
def format_test_result(result):
fmt = _FORMAT_TEST_RESULT.get(result.result, "%s")
return fmt % result.test_name
def findtestdir(path=None):
return path or os.path.dirname(os.path.dirname(__file__)) or os.curdir
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
@ -73,24 +86,84 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
return stdtests + sorted(tests)
def get_abs_module(ns, test):
if test.startswith('test.') or ns.testdir:
return test
def get_abs_module(ns, test_name):
if test_name.startswith('test.') or ns.testdir:
return test_name
else:
# Always import it from the test package
return 'test.' + test
# Import it from the test package
return 'test.' + test_name
def runtest(ns, test):
TestResult = collections.namedtuple('TestResult',
'test_name result test_time xml_data')
def _runtest(ns, test_name):
# Handle faulthandler timeout, capture stdout+stderr, XML serialization
# and measure time.
output_on_failure = ns.verbose3
use_timeout = (ns.timeout is not None)
if use_timeout:
faulthandler.dump_traceback_later(ns.timeout, exit=True)
start_time = time.perf_counter()
try:
support.set_match_tests(ns.match_tests)
support.junit_xml_list = xml_list = [] if ns.xmlpath else None
if ns.failfast:
support.failfast = True
if output_on_failure:
support.verbose = True
stream = io.StringIO()
orig_stdout = sys.stdout
orig_stderr = sys.stderr
try:
sys.stdout = stream
sys.stderr = stream
result = _runtest_inner(ns, test_name,
display_failure=False)
if result != PASSED:
output = stream.getvalue()
orig_stderr.write(output)
orig_stderr.flush()
finally:
sys.stdout = orig_stdout
sys.stderr = orig_stderr
else:
# Tell tests to be moderately quiet
support.verbose = ns.verbose
result = _runtest_inner(ns, test_name,
display_failure=not ns.verbose)
if xml_list:
import xml.etree.ElementTree as ET
xml_data = [ET.tostring(x).decode('us-ascii') for x in xml_list]
else:
xml_data = None
test_time = time.perf_counter() - start_time
return TestResult(test_name, result, test_time, xml_data)
finally:
if use_timeout:
faulthandler.cancel_dump_traceback_later()
support.junit_xml_list = None
def runtest(ns, test_name):
"""Run a single test.
ns -- regrtest namespace of options
test -- the name of the test
test_name -- the name of the test
Returns the tuple (result, test_time, xml_data), where result is one
of the constants:
INTERRUPTED KeyboardInterrupt when run under -j
INTERRUPTED KeyboardInterrupt
RESOURCE_DENIED test skipped because resource denied
SKIPPED test skipped for some other reason
ENV_CHANGED test failed because it changed the execution environment
@ -101,130 +174,123 @@ def runtest(ns, test):
If ns.xmlpath is not None, xml_data is a list containing each
generated testsuite element.
"""
output_on_failure = ns.verbose3
use_timeout = (ns.timeout is not None)
if use_timeout:
faulthandler.dump_traceback_later(ns.timeout, exit=True)
try:
support.set_match_tests(ns.match_tests)
# reset the environment_altered flag to detect if a test altered
# the environment
support.environment_altered = False
support.junit_xml_list = xml_list = [] if ns.xmlpath else None
if ns.failfast:
support.failfast = True
if output_on_failure:
support.verbose = True
return _runtest(ns, test_name)
except:
if not ns.pgo:
msg = traceback.format_exc()
print(f"test {test_name} crashed -- {msg}",
file=sys.stderr, flush=True)
return TestResult(test_name, FAILED, 0.0, None)
stream = io.StringIO()
orig_stdout = sys.stdout
orig_stderr = sys.stderr
try:
sys.stdout = stream
sys.stderr = stream
result = runtest_inner(ns, test, display_failure=False)
if result[0] != PASSED:
output = stream.getvalue()
orig_stderr.write(output)
orig_stderr.flush()
finally:
sys.stdout = orig_stdout
sys.stderr = orig_stderr
else:
support.verbose = ns.verbose # Tell tests to be moderately quiet
result = runtest_inner(ns, test, display_failure=not ns.verbose)
if xml_list:
import xml.etree.ElementTree as ET
xml_data = [ET.tostring(x).decode('us-ascii') for x in xml_list]
def _test_module(the_module):
loader = unittest.TestLoader()
tests = loader.loadTestsFromModule(the_module)
for error in loader.errors:
print(error, file=sys.stderr)
if loader.errors:
raise Exception("errors while loading tests")
support.run_unittest(tests)
def _runtest_inner2(ns, test_name):
# Load the test function, run the test function, handle huntrleaks
# and findleaks to detect leaks
abstest = get_abs_module(ns, test_name)
# remove the module from sys.module to reload it if it was already imported
support.unload(abstest)
the_module = importlib.import_module(abstest)
# If the test has a test_main, that will run the appropriate
# tests. If not, use normal unittest test loading.
test_runner = getattr(the_module, "test_main", None)
if test_runner is None:
test_runner = functools.partial(_test_module, the_module)
try:
if ns.huntrleaks:
# Return True if the test leaked references
refleak = dash_R(ns, test_name, test_runner)
else:
xml_data = None
return result + (xml_data,)
test_runner()
refleak = False
finally:
if use_timeout:
faulthandler.cancel_dump_traceback_later()
cleanup_test_droppings(test, ns.verbose)
support.junit_xml_list = None
cleanup_test_droppings(test_name, ns.verbose)
support.gc_collect()
if gc.garbage:
support.environment_altered = True
print_warning(f"{test_name} created {len(gc.garbage)} "
f"uncollectable object(s).")
# move the uncollectable objects somewhere,
# so we don't see them again
FOUND_GARBAGE.extend(gc.garbage)
gc.garbage.clear()
def post_test_cleanup():
support.reap_children()
return refleak
def _runtest_inner(ns, test_name, display_failure=True):
    # Detect environment changes, handle exceptions.

    # Reset the environment_altered flag to detect if a test altered
    # the environment
    support.environment_altered = False

    if ns.pgo:
        display_failure = False
    try:
        clear_caches()

        with saved_test_environment(test_name, ns.verbose, ns.quiet,
                                    pgo=ns.pgo) as environment:
            refleak = _runtest_inner2(ns, test_name)
    except support.ResourceDenied as msg:
        if not ns.quiet and not ns.pgo:
            print(f"{test_name} skipped -- {msg}", flush=True)
        return RESOURCE_DENIED
    except unittest.SkipTest as msg:
        if not ns.quiet and not ns.pgo:
            print(f"{test_name} skipped -- {msg}", flush=True)
        return SKIPPED
    except support.TestFailed as exc:
        msg = f"test {test_name} failed"
        if display_failure:
            msg = f"{msg} -- {exc}"
        if not ns.pgo:
            print(msg, file=sys.stderr, flush=True)
        return FAILED
    except support.TestDidNotRun:
        return TEST_DID_NOT_RUN
    except KeyboardInterrupt:
        print()
        return INTERRUPTED
    except:
        if not ns.pgo:
            msg = traceback.format_exc()
            print(f"test {test_name} crashed -- {msg}",
                  file=sys.stderr, flush=True)
        return FAILED

    if refleak:
        return FAILED
    if environment.changed:
        return ENV_CHANGED
    return PASSED
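The RESOURCE_DENIED branch above is normally reached through test.support.requires(), which raises ResourceDenied when a resource was not enabled with -u. A short sketch (the test module is hypothetical):

# Hypothetical sketch: how a test triggers the RESOURCE_DENIED path.
from test import support

def test_main():
    # Raises support.ResourceDenied unless regrtest ran with -unetwork;
    # _runtest_inner() above turns that into RESOURCE_DENIED.
    support.requires('network')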
def cleanup_test_droppings(test_name, verbose):
    # First kill any dangling references to open files etc.
    # This can also issue some ResourceWarnings which would otherwise get
    # triggered during the following test run, and possibly produce failures.
    support.gc_collect()
# Try to clean up junk commonly left behind. While tests shouldn't leave
# any files or directories behind, when a test fails that can be tedious
@ -239,23 +305,23 @@ def cleanup_test_droppings(testname, verbose):
continue
if os.path.isdir(name):
import shutil
kind, nuker = "directory", shutil.rmtree
elif os.path.isfile(name):
kind, nuker = "file", os.unlink
else:
            raise RuntimeError(f"os.path says {name!r} exists but is neither "
                               f"directory nor file")
if verbose:
print("%r left behind %s %r" % (testname, kind, name))
print_warning("%r left behind %s %r" % (test_name, kind, name))
support.environment_altered = True
try:
import stat
# fix possible permissions problems that might prevent cleanup
os.chmod(name, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
nuker(name)
        except Exception as exc:
            print_warning(f"{test_name} left behind {kind} {name!r} "
                          f"and it couldn't be removed: {exc}")


def findtestdir(path=None):
    return path or os.path.dirname(os.path.dirname(__file__)) or os.curdir

View file

@ -1,7 +1,9 @@
import collections
import faulthandler
import json
import os
import queue
import subprocess
import sys
import threading
import time
@ -11,7 +13,7 @@
from test.libregrtest.runtest import (
runtest, INTERRUPTED, CHILD_ERROR, PROGRESS_MIN_TIME,
    format_test_result, TestResult)
from test.libregrtest.setup import setup_tests
from test.libregrtest.utils import format_duration
@ -19,20 +21,12 @@
# Display the running tests if nothing happened last N seconds
PROGRESS_UPDATE = 30.0 # seconds
def must_stop(result):
return result.result in (INTERRUPTED, CHILD_ERROR)
def run_test_in_subprocess(testname, ns):
"""Run the given test in a subprocess with --worker-args.
ns is the option Namespace parsed from command-line arguments. regrtest
is invoked in a subprocess with the --worker-args argument; when the
subprocess exits, its return code, stdout and stderr are returned as a
3-tuple.
"""
from subprocess import Popen, PIPE
ns_dict = vars(ns)
worker_args = (ns_dict, testname)
worker_args = json.dumps(worker_args)
@ -47,15 +41,12 @@ def run_test_in_subprocess(testname, ns):
# Running the child from the same working directory as regrtest's original
# invocation ensures that TEMPDIR for the child is the same when
# sysconfig.is_python_build() is true. See issue 15300.
    return subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=True,
                            close_fds=(os.name != 'nt'),
                            cwd=support.SAVEDCWD)
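The --worker-args payload is plain JSON; a minimal sketch of the round trip between parent and worker (values illustrative, the decoding side lives in run_tests_worker() below):

# Illustrative JSON round trip for --worker-args.
import json

ns_dict = {'verbose': 1, 'timeout': None}        # vars(ns) in the parent
worker_args = json.dumps((ns_dict, 'test_os'))   # what the parent sends

ns_dict, testname = json.loads(worker_args)      # what the worker reads back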
def run_tests_worker(worker_args):
@ -64,14 +55,7 @@ def run_tests_worker(worker_args):
setup_tests(ns)
    result = runtest(ns, testname)
print() # Force a newline (just in case)
print(json.dumps(result), flush=True)
sys.exit(0)
@ -83,7 +67,6 @@ class MultiprocessIterator:
"""A thread-safe iterator over tests for multiprocess mode."""
def __init__(self, tests):
self.interrupted = False
self.lock = threading.Lock()
self.tests = tests
@ -92,152 +75,213 @@ def __iter__(self):
def __next__(self):
with self.lock:
if self.interrupted:
raise StopIteration('tests interrupted')
return next(self.tests)
MultiprocessResult = collections.namedtuple('MultiprocessResult',
'result stdout stderr error_msg')
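Each item placed on the output queue wraps a TestResult together with the worker's captured output; for instance (values illustrative):

# Illustrative MultiprocessResult for a crashed worker, mirroring the
# CHILD_ERROR construction in MultiprocessThread._runtest() below.
mp_result = MultiprocessResult(
    result=TestResult('test_os', CHILD_ERROR, 1.2, None),
    stdout='', stderr='Traceback (most recent call last): ...',
    error_msg='Exit code 1')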
class MultiprocessThread(threading.Thread):
def __init__(self, pending, output, ns):
super().__init__()
self.pending = pending
self.output = output
self.ns = ns
        self.current_test_name = None
self.start_time = None
self._popen = None
    def kill(self):
        if not self.is_alive():
            return
        if self._popen is not None:
            self._popen.kill()

    def _runtest(self, test_name):
try:
self.start_time = time.monotonic()
            self.current_test_name = test_name

            popen = run_test_in_subprocess(test_name, self.ns)
self._popen = popen
with popen:
try:
stdout, stderr = popen.communicate()
except:
popen.kill()
popen.wait()
raise
retcode = popen.wait()
finally:
            self.current_test_name = None
self._popen = None
        stdout = stdout.strip()
        stderr = stderr.rstrip()

        err_msg = None
        if retcode != 0:
            err_msg = "Exit code %s" % retcode
        else:
            stdout, _, result = stdout.rpartition("\n")
            stdout = stdout.rstrip()
            if not result:
                err_msg = "Failed to parse worker stdout"
            else:
                try:
                    # deserialize run_tests_worker() output
                    result = json.loads(result)
                    result = TestResult(*result)
                except Exception as exc:
                    err_msg = "Failed to parse worker JSON: %s" % exc

        if err_msg is not None:
            test_time = time.monotonic() - self.start_time
            result = TestResult(test_name, CHILD_ERROR, test_time, None)

        return MultiprocessResult(result, stdout, stderr, err_msg)
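The parsing above assumes the framing produced by run_tests_worker(): everything up to the last newline is regular test output, and the final line is the JSON-encoded TestResult. A condensed sketch of what rpartition("\n") undoes (values illustrative):

# Illustrative worker stdout: test output, then one JSON line.
import json

stdout = 'test_os output here\n["test_os", "Passed", 1.5, null]'
test_output, _, last_line = stdout.rpartition("\n")
fields = json.loads(last_line)   # list whose 4 items feed TestResult(*fields)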
def run(self):
while True:
try:
try:
test_name = next(self.pending)
except StopIteration:
break
mp_result = self._runtest(test_name)
self.output.put((False, mp_result))
if must_stop(mp_result.result):
break
except BaseException:
self.output.put((True, traceback.format_exc()))
break
def get_running(workers):
running = []
for worker in workers:
current_test_name = worker.current_test_name
if not current_test_name:
continue
dt = time.monotonic() - worker.start_time
if dt >= PROGRESS_MIN_TIME:
text = '%s (%s)' % (current_test_name, format_duration(dt))
running.append(text)
return running
class MultiprocessRunner:
def __init__(self, regrtest):
self.regrtest = regrtest
self.ns = regrtest.ns
self.output = queue.Queue()
self.pending = MultiprocessIterator(self.regrtest.tests)
if self.ns.timeout is not None:
self.test_timeout = self.ns.timeout * 1.5
else:
self.test_timeout = None
self.workers = None
def start_workers(self):
self.workers = [MultiprocessThread(self.pending, self.output, self.ns)
for _ in range(self.ns.use_mp)]
print("Run tests in parallel using %s child processes"
% len(self.workers))
for worker in self.workers:
worker.start()
def wait_workers(self):
for worker in self.workers:
worker.kill()
for worker in self.workers:
worker.join()
def _get_result(self):
if not any(worker.is_alive() for worker in self.workers):
# all worker threads are done: consume pending results
try:
return self.output.get(timeout=0)
except queue.Empty:
return None
while True:
if self.test_timeout is not None:
faulthandler.dump_traceback_later(self.test_timeout, exit=True)
# wait for a thread
timeout = max(PROGRESS_UPDATE, PROGRESS_MIN_TIME)
try:
return self.output.get(timeout=timeout)
except queue.Empty:
pass
# display progress
running = get_running(self.workers)
if running and not self.ns.pgo:
print('running: %s' % ', '.join(running), flush=True)
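The loop above re-arms faulthandler before every blocking wait, so that a hung worker eventually dumps all thread stacks and exits. A condensed, standalone sketch of that watchdog pattern (names hypothetical):

# Hypothetical condensed form of the watchdog used in _get_result().
import faulthandler
import queue

def wait_one(output, test_timeout, poll_timeout):
    while True:
        if test_timeout is not None:
            # If nothing completes within test_timeout, dump all
            # thread tracebacks and exit the process.
            faulthandler.dump_traceback_later(test_timeout, exit=True)
        try:
            return output.get(timeout=poll_timeout)
        except queue.Empty:
            pass  # no result yet; caller may report progress here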
def display_result(self, mp_result):
result = mp_result.result
text = format_test_result(result)
if mp_result.error_msg is not None:
# CHILD_ERROR
text += ' (%s)' % mp_result.error_msg
elif (result.test_time >= PROGRESS_MIN_TIME and not self.ns.pgo):
text += ' (%s)' % format_duration(result.test_time)
running = get_running(self.workers)
if running and not self.ns.pgo:
text += ' -- running: %s' % ', '.join(running)
self.regrtest.display_progress(self.test_index, text)
def _process_result(self, item):
if item[0]:
# Thread got an exception
format_exc = item[1]
print(f"regrtest worker thread failed: {format_exc}",
file=sys.stderr, flush=True)
return True
self.test_index += 1
mp_result = item[1]
self.regrtest.accumulate_result(mp_result.result)
self.display_result(mp_result)
if mp_result.stdout:
print(mp_result.stdout, flush=True)
if mp_result.stderr and not self.ns.pgo:
print(mp_result.stderr, file=sys.stderr, flush=True)
if must_stop(mp_result.result):
return True
return False
def run_tests(self):
self.start_workers()
self.test_index = 0
try:
            while True:
item = self._get_result()
if item is None:
break
stop = self._process_result(item)
if stop:
break
except KeyboardInterrupt:
print()
self.regrtest.interrupted = True
finally:
if self.test_timeout is not None:
faulthandler.cancel_dump_traceback_later()
self.wait_workers()
def run_tests_multiprocess(regrtest):
    MultiprocessRunner(regrtest).run_tests()

View file

@ -9,6 +9,7 @@
import threading
import warnings
from test import support
from test.libregrtest.utils import print_warning
try:
import _multiprocessing, multiprocessing.process
except ImportError:
@ -283,8 +284,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
self.changed = True
restore(original)
if not self.quiet and not self.pgo:
print(f"Warning -- {name} was modified by {self.testname}",
file=sys.stderr, flush=True)
print_warning(f"{name} was modified by {self.testname}")
print(f" Before: {original}\n After: {current} ",
file=sys.stderr, flush=True)
return False
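print_warning() comes from test.libregrtest.utils (imported at the top of this file); a sketch consistent with how it replaces the raw print() calls in this diff:

# Sketch of print_warning, assuming the behaviour implied by the
# messages above; the real definition lives in libregrtest/utils.py.
import sys

def print_warning(msg):
    print(f"Warning -- {msg}", file=sys.stderr, flush=True)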

View file

@ -10,8 +10,6 @@
except ImportError:
gc = None
def setup_tests(ns):
try:
@ -79,10 +77,6 @@ def setup_tests(ns):
if ns.huntrleaks:
unittest.BaseTestSuite._cleanup = False
if ns.memlimit is not None:
support.set_memlimit(ns.memlimit)

Some files were not shown because too many files have changed in this diff