Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-07-06 13:11:15 -07:00
Bump cherrypy from 18.9.0 to 18.10.0 (#2353)
* Bump cherrypy from 18.9.0 to 18.10.0

Bumps [cherrypy](https://github.com/cherrypy/cherrypy) from 18.9.0 to 18.10.0.
- [Changelog](https://github.com/cherrypy/cherrypy/blob/main/CHANGES.rst)
- [Commits](https://github.com/cherrypy/cherrypy/compare/v18.9.0...v18.10.0)

---
updated-dependencies:
- dependency-name: cherrypy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update cherrypy==18.10.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
This commit is contained in:
parent 5e977c044a
commit a528f052b9
73 changed files with 1713 additions and 1008 deletions
@@ -1 +1 @@
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)
+__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 #-------------------------------------------------------------------
 # tarfile.py
 #-------------------------------------------------------------------
@@ -46,7 +45,6 @@ import time
 import struct
 import copy
 import re
-import warnings

 from .compat.py38 import removesuffix

@@ -639,6 +637,10 @@ class _FileInFile(object):
 def flush(self):
 pass

+@property
+def mode(self):
+return 'rb'
+
 def readable(self):
 return True

@@ -875,7 +877,7 @@ class TarInfo(object):
 pax_headers = ('A dictionary containing key-value pairs of an '
 'associated pax extended header.'),
 sparse = 'Sparse member information.',
-tarfile = None,
+_tarfile = None,
 _sparse_structs = None,
 _link_target = None,
 )
@@ -904,6 +906,24 @@ class TarInfo(object):
 self.sparse = None # sparse member information
 self.pax_headers = {} # pax header information

+@property
+def tarfile(self):
+import warnings
+warnings.warn(
+'The undocumented "tarfile" attribute of TarInfo objects '
++ 'is deprecated and will be removed in Python 3.16',
+DeprecationWarning, stacklevel=2)
+return self._tarfile
+
+@tarfile.setter
+def tarfile(self, tarfile):
+import warnings
+warnings.warn(
+'The undocumented "tarfile" attribute of TarInfo objects '
++ 'is deprecated and will be removed in Python 3.16',
+DeprecationWarning, stacklevel=2)
+self._tarfile = tarfile
+
 @property
 def path(self):
 'In pax headers, "name" is called "path".'
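Aside (not part of the diff): the hunk above replaces the undocumented `TarInfo.tarfile` attribute with a private `_tarfile` slot plus a property/setter pair that emit `DeprecationWarning`. A minimal sketch of what callers see, assuming a `tarfile` module that carries this patch (CPython 3.13+ or this vendored backport); on an unpatched module the attribute is a plain value and nothing is recorded:

```python
import warnings
import tarfile  # assumption: a build carrying the deprecation shim shown above

info = tarfile.TarInfo(name="hello.txt")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _ = info.tarfile      # read goes through the new deprecated property
    info.tarfile = None   # write goes through the new deprecated setter

# On a patched module both accesses record a DeprecationWarning;
# on an unpatched one this list is simply empty.
print([w.category.__name__ for w in caught])
```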
@@ -1198,7 +1218,7 @@ class TarInfo(object):
 for keyword, value in pax_headers.items():
 keyword = keyword.encode("utf-8")
 if binary:
-# Try to restore the original byte representation of `value'.
+# Try to restore the original byte representation of 'value'.
 # Needless to say, that the encoding must match the string.
 value = value.encode(encoding, "surrogateescape")
 else:
@@ -1643,14 +1663,14 @@ class TarFile(object):
 def __init__(self, name=None, mode="r", fileobj=None, format=None,
 tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
 errors="surrogateescape", pax_headers=None, debug=None,
-errorlevel=None, copybufsize=None):
-"""Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+errorlevel=None, copybufsize=None, stream=False):
+"""Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to
 read from an existing archive, 'a' to append data to an existing
-file or 'w' to create a new file overwriting an existing one. `mode'
+file or 'w' to create a new file overwriting an existing one. 'mode'
 defaults to 'r'.
-If `fileobj' is given, it is used for reading or writing data. If it
-can be determined, `mode' is overridden by `fileobj's mode.
-`fileobj' is not closed, when TarFile is closed.
+If 'fileobj' is given, it is used for reading or writing data. If it
+can be determined, 'mode' is overridden by 'fileobj's mode.
+'fileobj' is not closed, when TarFile is closed.
 """
 modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
 if mode not in modes:
@@ -1675,6 +1695,8 @@ class TarFile(object):
 self.name = os.path.abspath(name) if name else None
 self.fileobj = fileobj

+self.stream = stream
+
 # Init attributes.
 if format is not None:
 self.format = format
@@ -1977,7 +1999,7 @@ class TarFile(object):
 self.fileobj.close()

 def getmember(self, name):
-"""Return a TarInfo object for member ``name``. If ``name`` can not be
+"""Return a TarInfo object for member 'name'. If 'name' can not be
 found in the archive, KeyError is raised. If a member occurs more
 than once in the archive, its last occurrence is assumed to be the
 most up-to-date version.
@@ -2005,9 +2027,9 @@ class TarFile(object):

 def gettarinfo(self, name=None, arcname=None, fileobj=None):
 """Create a TarInfo object from the result of os.stat or equivalent
-on an existing file. The file is either named by ``name``, or
-specified as a file object ``fileobj`` with a file descriptor. If
-given, ``arcname`` specifies an alternative name for the file in the
+on an existing file. The file is either named by 'name', or
+specified as a file object 'fileobj' with a file descriptor. If
+given, 'arcname' specifies an alternative name for the file in the
 archive, otherwise, the name is taken from the 'name' attribute of
 'fileobj', or the 'name' argument. The name should be a text
 string.
@@ -2031,7 +2053,7 @@ class TarFile(object):
 # Now, fill the TarInfo object with
 # information specific for the file.
 tarinfo = self.tarinfo()
-tarinfo.tarfile = self # Not needed
+tarinfo._tarfile = self # To be removed in 3.16.

 # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
 if fileobj is None:
@@ -2103,11 +2125,15 @@ class TarFile(object):
 return tarinfo

 def list(self, verbose=True, *, members=None):
-"""Print a table of contents to sys.stdout. If ``verbose`` is False, only
-the names of the members are printed. If it is True, an `ls -l'-like
-output is produced. ``members`` is optional and must be a subset of the
+"""Print a table of contents to sys.stdout. If 'verbose' is False, only
+the names of the members are printed. If it is True, an 'ls -l'-like
+output is produced. 'members' is optional and must be a subset of the
 list returned by getmembers().
 """
+# Convert tarinfo type to stat type.
+type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK,
+FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR,
+DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK}
 self._check()

 if members is None:
@@ -2117,7 +2143,8 @@ class TarFile(object):
 if tarinfo.mode is None:
 _safe_print("??????????")
 else:
-_safe_print(stat.filemode(tarinfo.mode))
+modetype = type2mode.get(tarinfo.type, 0)
+_safe_print(stat.filemode(modetype | tarinfo.mode))
 _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
 tarinfo.gname or tarinfo.gid))
 if tarinfo.ischr() or tarinfo.isblk():
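Aside (not part of the diff): the new `type2mode` lookup exists because `TarInfo.mode` holds only permission bits, while `stat.filemode()` needs the file-type bits to render the leading character. A small standard-library illustration of why the bits are OR-ed together:

```python
import stat

# Permission bits alone leave the file-type column undecided, so the old
# call could not label directories, symlinks, FIFOs, etc. correctly.
print(stat.filemode(stat.S_IFREG | 0o644))   # '-rw-r--r--'
print(stat.filemode(stat.S_IFDIR | 0o755))   # 'drwxr-xr-x'
print(stat.filemode(stat.S_IFLNK | 0o777))   # 'lrwxrwxrwx'
```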
@@ -2141,11 +2168,11 @@ class TarFile(object):
 print()

 def add(self, name, arcname=None, recursive=True, *, filter=None):
-"""Add the file ``name`` to the archive. ``name`` may be any type of file
-(directory, fifo, symbolic link, etc.). If given, ``arcname``
+"""Add the file 'name' to the archive. 'name' may be any type of file
+(directory, fifo, symbolic link, etc.). If given, 'arcname'
 specifies an alternative name for the file in the archive.
 Directories are added recursively by default. This can be avoided by
-setting ``recursive`` to False. ``filter`` is a function
+setting 'recursive' to False. 'filter' is a function
 that expects a TarInfo object argument and returns the changed
 TarInfo object, if it returns None the TarInfo object will be
 excluded from the archive.
@@ -2192,13 +2219,16 @@ class TarFile(object):
 self.addfile(tarinfo)

 def addfile(self, tarinfo, fileobj=None):
-"""Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
-given, it should be a binary file, and tarinfo.size bytes are read
-from it and added to the archive. You can create TarInfo objects
-directly, or by using gettarinfo().
+"""Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents
+a non zero-size regular file, the 'fileobj' argument should be a binary file,
+and tarinfo.size bytes are read from it and added to the archive.
+You can create TarInfo objects directly, or by using gettarinfo().
 """
 self._check("awx")

+if fileobj is None and tarinfo.isreg() and tarinfo.size != 0:
+raise ValueError("fileobj not provided for non zero-size regular file")
+
 tarinfo = copy.copy(tarinfo)

 buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
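Aside (not part of the diff): with this change `addfile()` refuses a non-empty regular-file `TarInfo` unless a matching `fileobj` is supplied. A rough sketch of the new contract, assuming a patched `tarfile` module; older versions silently wrote a header with no data:

```python
import io
import tarfile  # assumption: a build carrying the check added above

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    member = tarfile.TarInfo("data.bin")
    member.size = 4
    tf.addfile(member, io.BytesIO(b"\x00" * 4))  # fileobj is now required here

    tf.addfile(tarfile.TarInfo("empty.txt"))     # size 0: still fine without fileobj

    bad = tarfile.TarInfo("oops.bin")
    bad.size = 4
    try:
        tf.addfile(bad)                          # ValueError on patched versions
    except ValueError as exc:
        print(exc)
```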
@@ -2220,11 +2250,12 @@ class TarFile(object):
 if filter is None:
 filter = self.extraction_filter
 if filter is None:
+import warnings
 warnings.warn(
 'Python 3.14 will, by default, filter extracted tar '
 + 'archives and reject files or modify their metadata. '
 + 'Use the filter argument to control this behavior.',
-DeprecationWarning)
+DeprecationWarning, stacklevel=3)
 return fully_trusted_filter
 if isinstance(filter, str):
 raise TypeError(
@@ -2243,12 +2274,12 @@ class TarFile(object):
 filter=None):
 """Extract all members from the archive to the current working
 directory and set owner, modification time and permissions on
-directories afterwards. `path' specifies a different directory
-to extract to. `members' is optional and must be a subset of the
-list returned by getmembers(). If `numeric_owner` is True, only
+directories afterwards. 'path' specifies a different directory
+to extract to. 'members' is optional and must be a subset of the
+list returned by getmembers(). If 'numeric_owner' is True, only
 the numbers for user/group names are used and not the names.

-The `filter` function will be called on each member just
+The 'filter' function will be called on each member just
 before extraction.
 It can return a changed TarInfo or None to skip the member.
 String names of common filters are accepted.
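Aside (not part of the diff): the `DeprecationWarning` in the previous hunk fires when `extractall()`/`extract()` is called without choosing an extraction filter. Passing one explicitly, as the docstring above describes, silences it and opts in to the safer behaviour; `"archive.tar.gz"` and `"unpacked"` below are placeholder names:

```python
import tarfile  # needs a version with extraction filters (Python 3.12+ or this backport)

with tarfile.open("archive.tar.gz") as tf:        # placeholder archive name
    # 'data' rejects absolute paths, parent-directory traversal, device nodes, ...
    tf.extractall(path="unpacked", filter="data")
```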
@@ -2288,13 +2319,13 @@ class TarFile(object):
 filter=None):
 """Extract a member from the archive to the current working directory,
 using its full name. Its file information is extracted as accurately
-as possible. `member' may be a filename or a TarInfo object. You can
-specify a different directory using `path'. File attributes (owner,
-mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+as possible. 'member' may be a filename or a TarInfo object. You can
+specify a different directory using 'path'. File attributes (owner,
+mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner'
 is True, only the numbers for user/group names are used and not
 the names.

-The `filter` function will be called before extraction.
+The 'filter' function will be called before extraction.
 It can return a changed TarInfo or None to skip the member.
 String names of common filters are accepted.
 """
@@ -2359,10 +2390,10 @@ class TarFile(object):
 self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))

 def extractfile(self, member):
-"""Extract a member from the archive as a file object. ``member`` may be
-a filename or a TarInfo object. If ``member`` is a regular file or
+"""Extract a member from the archive as a file object. 'member' may be
+a filename or a TarInfo object. If 'member' is a regular file or
 a link, an io.BufferedReader object is returned. For all other
-existing members, None is returned. If ``member`` does not appear
+existing members, None is returned. If 'member' does not appear
 in the archive, KeyError is raised.
 """
 self._check("r")
@@ -2406,7 +2437,7 @@ class TarFile(object):
 if upperdirs and not os.path.exists(upperdirs):
 # Create directories that are not part of the archive with
 # default permissions.
-os.makedirs(upperdirs)
+os.makedirs(upperdirs, exist_ok=True)

 if tarinfo.islnk() or tarinfo.issym():
 self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
@@ -2559,7 +2590,8 @@ class TarFile(object):
 os.lchown(targetpath, u, g)
 else:
 os.chown(targetpath, u, g)
-except OSError as e:
+except (OSError, OverflowError) as e:
+# OverflowError can be raised if an ID doesn't fit in 'id_t'
 raise ExtractError("could not change owner") from e

 def chmod(self, tarinfo, targetpath):
@@ -2642,7 +2674,9 @@ class TarFile(object):
 break

 if tarinfo is not None:
-self.members.append(tarinfo)
+# if streaming the file we do not want to cache the tarinfo
+if not self.stream:
+self.members.append(tarinfo)
 else:
 self._loaded = True

@@ -2693,11 +2727,12 @@ class TarFile(object):

 def _load(self):
 """Read through the entire archive file and look for readable
-members.
+members. This should not run if the file is set to stream.
 """
-while self.next() is not None:
-pass
-self._loaded = True
+if not self.stream:
+while self.next() is not None:
+pass
+self._loaded = True

 def _check(self, mode=None):
 """Check if TarFile is still open, and if the operation's mode
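Aside (not part of the diff): the `stream=False` keyword threaded through `__init__`, `next()` and `_load()` above lets a caller walk an archive without the reader caching every `TarInfo` in `tf.members`, which keeps memory flat for very large archives. A rough usage sketch, with `"huge-backup.tar"` as a placeholder path:

```python
import tarfile  # assumption: a build carrying the stream support shown above

with open("huge-backup.tar", "rb") as fh:
    with tarfile.TarFile(fileobj=fh, mode="r", stream=True) as tf:  # keyword added here
        for member in tf:   # members are yielded but no longer accumulated in tf.members
            print(member.name, member.size)
```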
@@ -57,9 +57,11 @@ These API's are described in the `CherryPy specification
 """

 try:
-import pkg_resources
+import importlib.metadata as importlib_metadata
 except ImportError:
-pass
+# fall back for python <= 3.7
+# This try/except can be removed with py <= 3.7 support
+import importlib_metadata

 from threading import local as _local

@@ -109,7 +111,7 @@ tree = _cptree.Tree()


 try:
-__version__ = pkg_resources.require('cherrypy')[0].version
+__version__ = importlib_metadata.version('cherrypy')
 except Exception:
 __version__ = 'unknown'

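Aside (not part of the diff): the two hunks above drop `pkg_resources` in favour of `importlib.metadata`. The same pattern in isolation, runnable on its own:

```python
try:
    import importlib.metadata as importlib_metadata   # stdlib since Python 3.8
except ImportError:
    import importlib_metadata                          # backport for Python <= 3.7

try:
    version = importlib_metadata.version('cherrypy')
except Exception:                                      # package metadata not installed
    version = 'unknown'

print(version)
```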
@@ -181,24 +183,28 @@ def quickstart(root=None, script_name='', config=None):
 class _Serving(_local):
 """An interface for registering request and response objects.

-Rather than have a separate "thread local" object for the request and
-the response, this class works as a single threadlocal container for
-both objects (and any others which developers wish to define). In this
-way, we can easily dump those objects when we stop/start a new HTTP
-conversation, yet still refer to them as module-level globals in a
-thread-safe way.
+Rather than have a separate "thread local" object for the request
+and the response, this class works as a single threadlocal container
+for both objects (and any others which developers wish to define).
+In this way, we can easily dump those objects when we stop/start a
+new HTTP conversation, yet still refer to them as module-level
+globals in a thread-safe way.
 """

 request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
 _httputil.Host('127.0.0.1', 1111))
+"""The request object for the current thread.
+
+In the main thread, and any threads which are not receiving HTTP
+requests, this is None.
 """
-The request object for the current thread. In the main thread,
-and any threads which are not receiving HTTP requests, this is None."""

 response = _cprequest.Response()
+"""The response object for the current thread.
+
+In the main thread, and any threads which are not receiving HTTP
+requests, this is None.
 """
-The response object for the current thread. In the main thread,
-and any threads which are not receiving HTTP requests, this is None."""

 def load(self, request, response):
 self.request = request
@@ -316,8 +322,8 @@ class _GlobalLogManager(_cplogging.LogManager):
 def __call__(self, *args, **kwargs):
 """Log the given message to the app.log or global log.

-Log the given message to the app.log or global
-log as appropriate.
+Log the given message to the app.log or global log as
+appropriate.
 """
 # Do NOT use try/except here. See
 # https://github.com/cherrypy/cherrypy/issues/945
@@ -330,8 +336,8 @@ class _GlobalLogManager(_cplogging.LogManager):
 def access(self):
 """Log an access message to the app.log or global log.

-Log the given message to the app.log or global
-log as appropriate.
+Log the given message to the app.log or global log as
+appropriate.
 """
 try:
 return request.app.log.access()
@@ -313,7 +313,10 @@ class Checker(object):

 # -------------------- Specific config warnings -------------------- #
 def check_localhost(self):
-"""Warn if any socket_host is 'localhost'. See #711."""
+"""Warn if any socket_host is 'localhost'.
+
+See #711.
+"""
 for k, v in cherrypy.config.items():
 if k == 'server.socket_host' and v == 'localhost':
 warnings.warn("The use of 'localhost' as a socket host can "
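Aside (not part of the diff): `check_localhost()` above warns when the configured socket host is the literal string 'localhost'. Binding to an explicit address avoids the ambiguity it guards against, for example:

```python
import cherrypy

cherrypy.config.update({
    'server.socket_host': '127.0.0.1',   # or '0.0.0.0' to listen on every interface
    'server.socket_port': 8080,
})
```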
@@ -1,5 +1,4 @@
-"""
-Configuration system for CherryPy.
+"""Configuration system for CherryPy.

 Configuration in CherryPy is implemented via dictionaries. Keys are strings
 which name the mapped value, which may be of any type.
@@ -132,8 +131,8 @@ def _if_filename_register_autoreload(ob):
 def merge(base, other):
 """Merge one app config (from a dict, file, or filename) into another.

-If the given config is a filename, it will be appended to
-the list of files to monitor for "autoreload" changes.
+If the given config is a filename, it will be appended to the list
+of files to monitor for "autoreload" changes.
 """
 _if_filename_register_autoreload(other)

@@ -1,9 +1,10 @@
 """CherryPy dispatchers.

 A 'dispatcher' is the object which looks up the 'page handler' callable
-and collects config for the current request based on the path_info, other
-request attributes, and the application architecture. The core calls the
-dispatcher as early as possible, passing it a 'path_info' argument.
+and collects config for the current request based on the path_info,
+other request attributes, and the application architecture. The core
+calls the dispatcher as early as possible, passing it a 'path_info'
+argument.

 The default dispatcher discovers the page handler by matching path_info
 to a hierarchical arrangement of objects, starting at request.app.root.
@@ -21,7 +22,6 @@ import cherrypy


 class PageHandler(object):
-
 """Callable which sets response.body."""

 def __init__(self, callable, *args, **kwargs):
@@ -64,8 +64,7 @@ class PageHandler(object):


 def test_callable_spec(callable, callable_args, callable_kwargs):
-"""
-Inspect callable and test to see if the given args are suitable for it.
+"""Inspect callable and test to see if the given args are suitable for it.

 When an error occurs during the handler's invoking stage there are 2
 erroneous cases:
@@ -252,16 +251,16 @@ else:


 class Dispatcher(object):
-
 """CherryPy Dispatcher which walks a tree of objects to find a handler.

-The tree is rooted at cherrypy.request.app.root, and each hierarchical
-component in the path_info argument is matched to a corresponding nested
-attribute of the root object. Matching handlers must have an 'exposed'
-attribute which evaluates to True. The special method name "index"
-matches a URI which ends in a slash ("/"). The special method name
-"default" may match a portion of the path_info (but only when no longer
-substring of the path_info matches some other object).
+The tree is rooted at cherrypy.request.app.root, and each
+hierarchical component in the path_info argument is matched to a
+corresponding nested attribute of the root object. Matching handlers
+must have an 'exposed' attribute which evaluates to True. The
+special method name "index" matches a URI which ends in a slash
+("/"). The special method name "default" may match a portion of the
+path_info (but only when no longer substring of the path_info
+matches some other object).

 This is the default, built-in dispatcher for CherryPy.
 """
@@ -306,9 +305,9 @@ class Dispatcher(object):

 The second object returned will be a list of names which are
 'virtual path' components: parts of the URL which are dynamic,
-and were not used when looking up the handler.
-These virtual path components are passed to the handler as
-positional arguments.
+and were not used when looking up the handler. These virtual
+path components are passed to the handler as positional
+arguments.
 """
 request = cherrypy.serving.request
 app = request.app
@@ -448,13 +447,11 @@ class Dispatcher(object):


 class MethodDispatcher(Dispatcher):
-
 """Additional dispatch based on cherrypy.request.method.upper().

-Methods named GET, POST, etc will be called on an exposed class.
-The method names must be all caps; the appropriate Allow header
-will be output showing all capitalized method names as allowable
-HTTP verbs.
+Methods named GET, POST, etc will be called on an exposed class. The
+method names must be all caps; the appropriate Allow header will be
+output showing all capitalized method names as allowable HTTP verbs.

 Note that the containing class must be exposed, not the methods.
 """
@@ -492,16 +489,14 @@ class MethodDispatcher(Dispatcher):


 class RoutesDispatcher(object):
-
 """A Routes based dispatcher for CherryPy."""

 def __init__(self, full_result=False, **mapper_options):
-"""
-Routes dispatcher
+"""Routes dispatcher.

-Set full_result to True if you wish the controller
-and the action to be passed on to the page handler
-parameters. By default they won't be.
+Set full_result to True if you wish the controller and the
+action to be passed on to the page handler parameters. By
+default they won't be.
 """
 import routes
 self.full_result = full_result
@@ -617,8 +612,7 @@ def XMLRPCDispatcher(next_dispatcher=Dispatcher()):

 def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
 **domains):
-"""
-Select a different handler based on the Host header.
+"""Select a different handler based on the Host header.

 This can be useful when running multiple sites within one CP server.
 It allows several domains to point to different parts of a single
@@ -136,19 +136,17 @@ from cherrypy.lib import httputil as _httputil


 class CherryPyException(Exception):
-
 """A base class for CherryPy exceptions."""
 pass


 class InternalRedirect(CherryPyException):
-
 """Exception raised to switch to the handler for a different URL.

-This exception will redirect processing to another path within the site
-(without informing the client). Provide the new path as an argument when
-raising the exception. Provide any params in the querystring for the new
-URL.
+This exception will redirect processing to another path within the
+site (without informing the client). Provide the new path as an
+argument when raising the exception. Provide any params in the
+querystring for the new URL.
 """

 def __init__(self, path, query_string=''):
@@ -173,7 +171,6 @@ class InternalRedirect(CherryPyException):


 class HTTPRedirect(CherryPyException):
-
 """Exception raised when the request should be redirected.

 This exception will force a HTTP redirect to the URL or URL's you give it.
@@ -202,7 +199,7 @@ class HTTPRedirect(CherryPyException):
 """The list of URL's to emit."""

 encoding = 'utf-8'
-"""The encoding when passed urls are not native strings"""
+"""The encoding when passed urls are not native strings."""

 def __init__(self, urls, status=None, encoding=None):
 self.urls = abs_urls = [
@@ -230,8 +227,7 @@ class HTTPRedirect(CherryPyException):

 @classproperty
 def default_status(cls):
-"""
-The default redirect status for the request.
+"""The default redirect status for the request.

 RFC 2616 indicates a 301 response code fits our goal; however,
 browser support for 301 is quite messy. Use 302/303 instead. See
@@ -249,8 +245,9 @@ class HTTPRedirect(CherryPyException):
 """Modify cherrypy.response status, headers, and body to represent
 self.

-CherryPy uses this internally, but you can also use it to create an
-HTTPRedirect object and set its output without *raising* the exception.
+CherryPy uses this internally, but you can also use it to create
+an HTTPRedirect object and set its output without *raising* the
+exception.
 """
 response = cherrypy.serving.response
 response.status = status = self.status
@@ -339,7 +336,6 @@ def clean_headers(status):


 class HTTPError(CherryPyException):
-
 """Exception used to return an HTTP error code (4xx-5xx) to the client.

 This exception can be used to automatically send a response using a
@@ -358,7 +354,9 @@ class HTTPError(CherryPyException):
 """

 status = None
-"""The HTTP status code. May be of type int or str (with a Reason-Phrase).
+"""The HTTP status code.
+
+May be of type int or str (with a Reason-Phrase).
 """

 code = None
@@ -386,8 +384,9 @@ class HTTPError(CherryPyException):
 """Modify cherrypy.response status, headers, and body to represent
 self.

-CherryPy uses this internally, but you can also use it to create an
-HTTPError object and set its output without *raising* the exception.
+CherryPy uses this internally, but you can also use it to create
+an HTTPError object and set its output without *raising* the
+exception.
 """
 response = cherrypy.serving.response

@@ -426,11 +425,10 @@ class HTTPError(CherryPyException):


 class NotFound(HTTPError):
-
 """Exception raised when a URL could not be mapped to any handler (404).

-This is equivalent to raising
-:class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
+This is equivalent to raising :class:`HTTPError("404 Not Found")
+<cherrypy._cperror.HTTPError>`.
 """

 def __init__(self, path=None):
@@ -477,8 +475,8 @@ _HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
 def get_error_page(status, **kwargs):
 """Return an HTML page, containing a pretty error response.

-status should be an int or a str.
-kwargs will be interpolated into the page template.
+status should be an int or a str. kwargs will be interpolated into
+the page template.
 """
 try:
 code, reason, message = _httputil.valid_status(status)
@@ -595,8 +593,8 @@ def bare_error(extrabody=None):
 """Produce status, headers, body for a critical error.

 Returns a triple without calling any other questionable functions,
-so it should be as error-free as possible. Call it from an HTTP server
-if you get errors outside of the request.
+so it should be as error-free as possible. Call it from an HTTP
+server if you get errors outside of the request.

 If extrabody is None, a friendly but rather unhelpful error message
 is set in the body. If extrabody is a string, it will be appended
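Aside (not part of the diff): the exceptions whose docstrings are reflowed above are part of CherryPy's public API and are normally raised from page handlers, for example:

```python
import cherrypy

class Root:
    @cherrypy.expose
    def old(self):
        # Forces a 302/303 redirect to the new location.
        raise cherrypy.HTTPRedirect('/new')

    @cherrypy.expose
    def teapot(self):
        # Returns an HTTP error page with the given status and message.
        raise cherrypy.HTTPError(418, "I'm a teapot")
```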
@@ -123,7 +123,6 @@ logfmt = logging.Formatter('%(message)s')


 class NullHandler(logging.Handler):
-
 """A no-op logging handler to silence the logging.lastResort handler."""

 def handle(self, record):
@@ -137,15 +136,16 @@ class NullHandler(logging.Handler):


 class LogManager(object):
-
 """An object to assist both simple and advanced logging.

 ``cherrypy.log`` is an instance of this class.
 """

 appid = None
-"""The id() of the Application object which owns this log manager. If this
-is a global log manager, appid is None."""
+"""The id() of the Application object which owns this log manager.
+
+If this is a global log manager, appid is None.
+"""

 error_log = None
 """The actual :class:`logging.Logger` instance for error messages."""
@@ -317,8 +317,8 @@ class LogManager(object):
 def screen(self):
 """Turn stderr/stdout logging on or off.

-If you set this to True, it'll add the appropriate StreamHandler for
-you. If you set it to False, it will remove the handler.
+If you set this to True, it'll add the appropriate StreamHandler
+for you. If you set it to False, it will remove the handler.
 """
 h = self._get_builtin_handler
 has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
@@ -414,7 +414,6 @@ class LogManager(object):


 class WSGIErrorHandler(logging.Handler):
-
 "A handler class which writes logging records to environ['wsgi.errors']."

 def flush(self):
@@ -452,6 +451,8 @@ class WSGIErrorHandler(logging.Handler):

 class LazyRfc3339UtcTime(object):
 def __str__(self):
-"""Return utcnow() in RFC3339 UTC Format."""
-iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
+"""Return datetime in RFC3339 UTC Format."""
+iso_formatted_now = datetime.datetime.now(
+datetime.timezone.utc,
+).isoformat('T')
 return f'{iso_formatted_now!s}Z'
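Aside (not part of the diff): the `LazyRfc3339UtcTime` hunk swaps the naive `utcnow()` call, deprecated since Python 3.12, for a timezone-aware one. The same substitution in isolation:

```python
import datetime

naive = datetime.datetime.utcnow().isoformat('T')                    # no offset, deprecated
aware = datetime.datetime.now(datetime.timezone.utc).isoformat('T')  # carries +00:00
print(naive)
print(aware)
```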
@@ -1,4 +1,4 @@
-"""Native adapter for serving CherryPy via mod_python
+"""Native adapter for serving CherryPy via mod_python.

 Basic usage:

@@ -120,10 +120,10 @@ class NativeGateway(cheroot.server.Gateway):
 class CPHTTPServer(cheroot.server.HTTPServer):
 """Wrapper for cheroot.server.HTTPServer.

-cheroot has been designed to not reference CherryPy in any way,
-so that it can be used in other frameworks and applications.
-Therefore, we wrap it here, so we can apply some attributes
-from config -> cherrypy.server -> HTTPServer.
+cheroot has been designed to not reference CherryPy in any way, so
+that it can be used in other frameworks and applications. Therefore,
+we wrap it here, so we can apply some attributes from config ->
+cherrypy.server -> HTTPServer.
 """

 def __init__(self, server_adapter=cherrypy.server):
@@ -248,7 +248,10 @@ def process_multipart_form_data(entity):


 def _old_process_multipart(entity):
-"""The behavior of 3.2 and lower. Deprecated and will be changed in 3.3."""
+"""The behavior of 3.2 and lower.
+
+Deprecated and will be changed in 3.3.
+"""
 process_multipart(entity)

 params = entity.params
@@ -277,7 +280,6 @@ def _old_process_multipart(entity):

 # -------------------------------- Entities --------------------------------- #
 class Entity(object):
-
 """An HTTP request body, or MIME multipart body.

 This class collects information about the HTTP request entity. When a
@@ -346,13 +348,15 @@ class Entity(object):
 content_type = None
 """The value of the Content-Type request header.

-If the Entity is part of a multipart payload, this will be the Content-Type
-given in the MIME headers for this part.
+If the Entity is part of a multipart payload, this will be the
+Content-Type given in the MIME headers for this part.
 """

 default_content_type = 'application/x-www-form-urlencoded'
 """This defines a default ``Content-Type`` to use if no Content-Type header
-is given. The empty string is used for RequestBody, which results in the
+is given.
+
+The empty string is used for RequestBody, which results in the
 request body not being read or parsed at all. This is by design; a missing
 ``Content-Type`` header in the HTTP request entity is an error at best,
 and a security hole at worst. For multipart parts, however, the MIME spec
@@ -402,8 +406,8 @@ class Entity(object):
 part_class = None
 """The class used for multipart parts.

-You can replace this with custom subclasses to alter the processing of
-multipart parts.
+You can replace this with custom subclasses to alter the processing
+of multipart parts.
 """

 def __init__(self, fp, headers, params=None, parts=None):
@@ -509,7 +513,8 @@ class Entity(object):
 """Return a file-like object into which the request body will be read.

 By default, this will return a TemporaryFile. Override as needed.
-See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`."""
+See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.
+"""
 return tempfile.TemporaryFile()

 def fullvalue(self):
@@ -525,7 +530,7 @@ class Entity(object):
 return value

 def decode_entity(self, value):
-"""Return a given byte encoded value as a string"""
+"""Return a given byte encoded value as a string."""
 for charset in self.attempt_charsets:
 try:
 value = value.decode(charset)
@@ -569,7 +574,6 @@ class Entity(object):


 class Part(Entity):
-
 """A MIME part entity, part of a multipart entity."""

 # "The default character set, which must be assumed in the absence of a
@@ -653,8 +657,8 @@ class Part(Entity):
 def read_lines_to_boundary(self, fp_out=None):
 """Read bytes from self.fp and return or write them to a file.

-If the 'fp_out' argument is None (the default), all bytes read are
-returned in a single byte string.
+If the 'fp_out' argument is None (the default), all bytes read
+are returned in a single byte string.

 If the 'fp_out' argument is not None, it must be a file-like
 object that supports the 'write' method; all bytes read will be
@@ -755,15 +759,15 @@ class SizedReader:
 def read(self, size=None, fp_out=None):
 """Read bytes from the request body and return or write them to a file.

-A number of bytes less than or equal to the 'size' argument are read
-off the socket. The actual number of bytes read are tracked in
-self.bytes_read. The number may be smaller than 'size' when 1) the
-client sends fewer bytes, 2) the 'Content-Length' request header
-specifies fewer bytes than requested, or 3) the number of bytes read
-exceeds self.maxbytes (in which case, 413 is raised).
+A number of bytes less than or equal to the 'size' argument are
+read off the socket. The actual number of bytes read are tracked
+in self.bytes_read. The number may be smaller than 'size' when
+1) the client sends fewer bytes, 2) the 'Content-Length' request
+header specifies fewer bytes than requested, or 3) the number of
+bytes read exceeds self.maxbytes (in which case, 413 is raised).

-If the 'fp_out' argument is None (the default), all bytes read are
-returned in a single byte string.
+If the 'fp_out' argument is None (the default), all bytes read
+are returned in a single byte string.

 If the 'fp_out' argument is not None, it must be a file-like
 object that supports the 'write' method; all bytes read will be
@@ -918,7 +922,6 @@ class SizedReader:


 class RequestBody(Entity):
-
 """The entity of the HTTP request."""

 bufsize = 8 * 1024
@@ -16,7 +16,6 @@ from cherrypy.lib import httputil, reprconf, encoding


 class Hook(object):
-
 """A callback and its metadata: failsafe, priority, and kwargs."""

 callback = None
@@ -30,10 +29,12 @@ class Hook(object):
 from the same call point raise exceptions."""

 priority = 50
+"""Defines the order of execution for a list of Hooks.
+
+Priority numbers should be limited to the closed interval [0, 100],
+but values outside this range are acceptable, as are fractional
+values.
 """
-Defines the order of execution for a list of Hooks. Priority numbers
-should be limited to the closed interval [0, 100], but values outside
-this range are acceptable, as are fractional values."""

 kwargs = {}
 """
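Aside (not part of the diff): the `priority` attribute documented above orders hook execution within a call point, with lower values running first and 50 as the default. A short example of registering a tool with an explicit priority:

```python
import cherrypy

def add_security_headers():
    cherrypy.response.headers['X-Content-Type-Options'] = 'nosniff'

# Runs after default-priority (50) hooks attached to the same call point.
cherrypy.tools.secheaders = cherrypy.Tool(
    'before_finalize', add_security_headers, priority=60,
)
```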
@@ -74,7 +75,6 @@ class Hook(object):


 class HookMap(dict):
-
 """A map of call points to lists of callbacks (Hook objects)."""

 def __new__(cls, points=None):
@@ -190,23 +190,23 @@ hookpoints = ['on_start_resource', 'before_request_body',


 class Request(object):
-
 """An HTTP request.

-This object represents the metadata of an HTTP request message;
-that is, it contains attributes which describe the environment
-in which the request URL, headers, and body were sent (if you
-want tools to interpret the headers and body, those are elsewhere,
-mostly in Tools). This 'metadata' consists of socket data,
-transport characteristics, and the Request-Line. This object
-also contains data regarding the configuration in effect for
-the given URL, and the execution plan for generating a response.
+This object represents the metadata of an HTTP request message; that
+is, it contains attributes which describe the environment in which
+the request URL, headers, and body were sent (if you want tools to
+interpret the headers and body, those are elsewhere, mostly in
+Tools). This 'metadata' consists of socket data, transport
+characteristics, and the Request-Line. This object also contains
+data regarding the configuration in effect for the given URL, and
+the execution plan for generating a response.
 """

 prev = None
+"""The previous Request object (if any).
+
+This should be None unless we are processing an InternalRedirect.
 """
-The previous Request object (if any). This should be None
-unless we are processing an InternalRedirect."""

 # Conversation/connection attributes
 local = httputil.Host('127.0.0.1', 80)
@@ -216,9 +216,10 @@ class Request(object):
 'An httputil.Host(ip, port, hostname) object for the client socket.'

 scheme = 'http'
+"""The protocol used between client and server.
+
+In most cases, this will be either 'http' or 'https'.
 """
-The protocol used between client and server. In most cases,
-this will be either 'http' or 'https'."""

 server_protocol = 'HTTP/1.1'
 """
@@ -227,25 +228,30 @@ class Request(object):

 base = ''
 """The (scheme://host) portion of the requested URL.
+
 In some cases (e.g. when proxying via mod_rewrite), this may contain
 path segments which cherrypy.url uses when constructing url's, but
-which otherwise are ignored by CherryPy. Regardless, this value
-MUST NOT end in a slash."""
+which otherwise are ignored by CherryPy. Regardless, this value MUST
+NOT end in a slash.
+"""

 # Request-Line attributes
 request_line = ''
+"""The complete Request-Line received from the client.
+
+This is a single string consisting of the request method, URI, and
+protocol version (joined by spaces). Any final CRLF is removed.
 """
-The complete Request-Line received from the client. This is a
-single string consisting of the request method, URI, and protocol
-version (joined by spaces). Any final CRLF is removed."""

 method = 'GET'
+"""Indicates the HTTP method to be performed on the resource identified by
+the Request-URI.
+
+Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy
+allows any extension method; however, various HTTP servers and
+gateways may restrict the set of allowable methods. CherryPy
+applications SHOULD restrict the set (on a per-URI basis).
 """
-Indicates the HTTP method to be performed on the resource identified
-by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
-DELETE. CherryPy allows any extension method; however, various HTTP
-servers and gateways may restrict the set of allowable methods.
-CherryPy applications SHOULD restrict the set (on a per-URI basis)."""

 query_string = ''
 """
@@ -277,22 +283,26 @@ class Request(object):
 A dict which combines query string (GET) and request entity (POST)
 variables. This is populated in two stages: GET params are added
 before the 'on_start_resource' hook, and POST params are added
-between the 'before_request_body' and 'before_handler' hooks."""
+between the 'before_request_body' and 'before_handler' hooks.
+"""

 # Message attributes
 header_list = []
+"""A list of the HTTP request headers as (name, value) tuples.
+
+In general, you should use request.headers (a dict) instead.
 """
-A list of the HTTP request headers as (name, value) tuples.
-In general, you should use request.headers (a dict) instead."""

 headers = httputil.HeaderMap()
-"""
-A dict-like object containing the request headers. Keys are header
+"""A dict-like object containing the request headers.
+
+Keys are header
 names (in Title-Case format); however, you may get and set them in
 a case-insensitive manner. That is, headers['Content-Type'] and
 headers['content-type'] refer to the same value. Values are header
 values (decoded according to :rfc:`2047` if necessary). See also:
-httputil.HeaderMap, httputil.HeaderElement."""
+httputil.HeaderMap, httputil.HeaderElement.
+"""

 cookie = SimpleCookie()
|
cookie = SimpleCookie()
|
||||||
"""See help(Cookie)."""
|
"""See help(Cookie)."""
|
||||||
|
@ -336,7 +346,8 @@ class Request(object):
|
||||||
or multipart, this will be None. Otherwise, this will be an instance
|
or multipart, this will be None. Otherwise, this will be an instance
|
||||||
of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
|
of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
|
||||||
can .read()); this value is set between the 'before_request_body' and
|
can .read()); this value is set between the 'before_request_body' and
|
||||||
'before_handler' hooks (assuming that process_request_body is True)."""
|
'before_handler' hooks (assuming that process_request_body is True).
|
||||||
|
"""
|
||||||
|
|
||||||
# Dispatch attributes
|
# Dispatch attributes
|
||||||
dispatch = cherrypy.dispatch.Dispatcher()
|
dispatch = cherrypy.dispatch.Dispatcher()
|
||||||
|
@ -347,23 +358,24 @@ class Request(object):
|
||||||
calls the dispatcher as early as possible, passing it a 'path_info'
|
calls the dispatcher as early as possible, passing it a 'path_info'
|
||||||
argument.
|
argument.
|
||||||
|
|
||||||
The default dispatcher discovers the page handler by matching path_info
|
The default dispatcher discovers the page handler by matching
|
||||||
to a hierarchical arrangement of objects, starting at request.app.root.
|
path_info to a hierarchical arrangement of objects, starting at
|
||||||
See help(cherrypy.dispatch) for more information."""
|
request.app.root. See help(cherrypy.dispatch) for more information.
|
||||||
|
"""
|
||||||
|
|
||||||
script_name = ''
|
script_name = ''
|
||||||
"""
|
"""The 'mount point' of the application which is handling this request.
|
||||||
The 'mount point' of the application which is handling this request.
|
|
||||||
|
|
||||||
This attribute MUST NOT end in a slash. If the script_name refers to
|
This attribute MUST NOT end in a slash. If the script_name refers to
|
||||||
the root of the URI, it MUST be an empty string (not "/").
|
the root of the URI, it MUST be an empty string (not "/").
|
||||||
"""
|
"""
|
||||||
|
|
||||||
path_info = '/'
|
path_info = '/'
|
||||||
|
"""The 'relative path' portion of the Request-URI.
|
||||||
|
|
||||||
|
This is relative to the script_name ('mount point') of the
|
||||||
|
application which is handling this request.
|
||||||
"""
|
"""
|
||||||
The 'relative path' portion of the Request-URI. This is relative
|
|
||||||
to the script_name ('mount point') of the application which is
|
|
||||||
handling this request."""
|
|
||||||
|
|
||||||
login = None
|
login = None
|
||||||
"""
|
"""
|
||||||
|
@ -391,14 +403,16 @@ class Request(object):
|
||||||
of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
|
of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
|
||||||
|
|
||||||
config = None
|
config = None
|
||||||
|
"""A flat dict of all configuration entries which apply to the current
|
||||||
|
request.
|
||||||
|
|
||||||
|
These entries are collected from global config, application config
|
||||||
|
(based on request.path_info), and from handler config (exactly how
|
||||||
|
is governed by the request.dispatch object in effect for this
|
||||||
|
request; by default, handler config can be attached anywhere in the
|
||||||
|
tree between request.app.root and the final handler, and inherits
|
||||||
|
downward).
|
||||||
"""
|
"""
|
||||||
A flat dict of all configuration entries which apply to the
|
|
||||||
current request. These entries are collected from global config,
|
|
||||||
application config (based on request.path_info), and from handler
|
|
||||||
config (exactly how is governed by the request.dispatch object in
|
|
||||||
effect for this request; by default, handler config can be attached
|
|
||||||
anywhere in the tree between request.app.root and the final handler,
|
|
||||||
and inherits downward)."""
|
|
||||||
|
|
||||||
is_index = None
|
is_index = None
|
||||||
"""
|
"""
|
||||||
|
@ -409,13 +423,14 @@ class Request(object):
|
||||||
the trailing slash. See cherrypy.tools.trailing_slash."""
|
the trailing slash. See cherrypy.tools.trailing_slash."""
|
||||||
|
|
||||||
hooks = HookMap(hookpoints)
|
hooks = HookMap(hookpoints)
|
||||||
"""
|
"""A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
|
||||||
A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
|
|
||||||
Each key is a str naming the hook point, and each value is a list
|
Each key is a str naming the hook point, and each value is a list
|
||||||
of hooks which will be called at that hook point during this request.
|
of hooks which will be called at that hook point during this request.
|
||||||
The list of hooks is generally populated as early as possible (mostly
|
The list of hooks is generally populated as early as possible (mostly
|
||||||
from Tools specified in config), but may be extended at any time.
|
from Tools specified in config), but may be extended at any time.
|
||||||
See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""
|
See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.
|
||||||
|
"""
|
||||||
|
|
||||||
error_response = cherrypy.HTTPError(500).set_response
|
error_response = cherrypy.HTTPError(500).set_response
|
||||||
"""
|
"""
|
||||||
|
@ -428,12 +443,11 @@ class Request(object):
|
||||||
error response to the user-agent."""
|
error response to the user-agent."""
|
||||||
|
|
||||||
error_page = {}
|
error_page = {}
|
||||||
"""
|
"""A dict of {error code: response filename or callable} pairs.
|
||||||
A dict of {error code: response filename or callable} pairs.
|
|
||||||
|
|
||||||
The error code must be an int representing a given HTTP error code,
|
The error code must be an int representing a given HTTP error code,
|
||||||
or the string 'default', which will be used if no matching entry
|
or the string 'default', which will be used if no matching entry is
|
||||||
is found for a given numeric code.
|
found for a given numeric code.
|
||||||
|
|
||||||
If a filename is provided, the file should contain a Python string-
|
If a filename is provided, the file should contain a Python string-
|
||||||
formatting template, and can expect by default to receive format
|
formatting template, and can expect by default to receive format
|
||||||
|
@ -447,8 +461,8 @@ class Request(object):
|
||||||
iterable of strings which will be set to response.body. It may also
|
iterable of strings which will be set to response.body. It may also
|
||||||
override headers or perform any other processing.
|
override headers or perform any other processing.
|
||||||
|
|
||||||
If no entry is given for an error code, and no 'default' entry exists,
|
If no entry is given for an error code, and no 'default' entry
|
||||||
a default template will be used.
|
exists, a default template will be used.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
show_tracebacks = True
|
show_tracebacks = True
|
||||||
|
@ -473,9 +487,10 @@ class Request(object):
|
||||||
"""True once the close method has been called, False otherwise."""
|
"""True once the close method has been called, False otherwise."""
|
||||||
|
|
||||||
stage = None
|
stage = None
|
||||||
|
"""A string containing the stage reached in the request-handling process.
|
||||||
|
|
||||||
|
This is useful when debugging a live server with hung requests.
|
||||||
"""
|
"""
|
||||||
A string containing the stage reached in the request-handling process.
|
|
||||||
This is useful when debugging a live server with hung requests."""
|
|
||||||
|
|
||||||
unique_id = None
|
unique_id = None
|
||||||
"""A lazy object generating and memorizing UUID4 on ``str()`` render."""
|
"""A lazy object generating and memorizing UUID4 on ``str()`` render."""
|
||||||
|
@ -492,9 +507,10 @@ class Request(object):
|
||||||
server_protocol='HTTP/1.1'):
|
server_protocol='HTTP/1.1'):
|
||||||
"""Populate a new Request object.
|
"""Populate a new Request object.
|
||||||
|
|
||||||
local_host should be an httputil.Host object with the server info.
|
local_host should be an httputil.Host object with the server
|
||||||
remote_host should be an httputil.Host object with the client info.
|
info. remote_host should be an httputil.Host object with the
|
||||||
scheme should be a string, either "http" or "https".
|
client info. scheme should be a string, either "http" or
|
||||||
|
"https".
|
||||||
"""
|
"""
|
||||||
self.local = local_host
|
self.local = local_host
|
||||||
self.remote = remote_host
|
self.remote = remote_host
|
||||||
|
@ -514,7 +530,10 @@ class Request(object):
|
||||||
self.unique_id = LazyUUID4()
|
self.unique_id = LazyUUID4()
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
"""Run cleanup code. (Core)"""
|
"""Run cleanup code.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
if not self.closed:
|
if not self.closed:
|
||||||
self.closed = True
|
self.closed = True
|
||||||
self.stage = 'on_end_request'
|
self.stage = 'on_end_request'
|
||||||
|
@ -551,7 +570,6 @@ class Request(object):
|
||||||
|
|
||||||
Consumer code (HTTP servers) should then access these response
|
Consumer code (HTTP servers) should then access these response
|
||||||
attributes to build the outbound stream.
|
attributes to build the outbound stream.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
self.stage = 'run'
|
self.stage = 'run'
|
||||||
|
@ -631,7 +649,10 @@ class Request(object):
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def respond(self, path_info):
|
def respond(self, path_info):
|
||||||
"""Generate a response for the resource at self.path_info. (Core)"""
|
"""Generate a response for the resource at self.path_info.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
|
@ -702,7 +723,10 @@ class Request(object):
|
||||||
response.finalize()
|
response.finalize()
|
||||||
|
|
||||||
def process_query_string(self):
|
def process_query_string(self):
|
||||||
"""Parse the query string into Python structures. (Core)"""
|
"""Parse the query string into Python structures.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
p = httputil.parse_query_string(
|
p = httputil.parse_query_string(
|
||||||
self.query_string, encoding=self.query_string_encoding)
|
self.query_string, encoding=self.query_string_encoding)
|
||||||
|
@ -715,7 +739,10 @@ class Request(object):
|
||||||
self.params.update(p)
|
self.params.update(p)
|
||||||
|
|
||||||
def process_headers(self):
|
def process_headers(self):
|
||||||
"""Parse HTTP header data into Python structures. (Core)"""
|
"""Parse HTTP header data into Python structures.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
# Process the headers into self.headers
|
# Process the headers into self.headers
|
||||||
headers = self.headers
|
headers = self.headers
|
||||||
for name, value in self.header_list:
|
for name, value in self.header_list:
|
||||||
|
@ -751,7 +778,10 @@ class Request(object):
|
||||||
self.base = '%s://%s' % (self.scheme, host)
|
self.base = '%s://%s' % (self.scheme, host)
|
||||||
|
|
||||||
def get_resource(self, path):
|
def get_resource(self, path):
|
||||||
"""Call a dispatcher (which sets self.handler and .config). (Core)"""
|
"""Call a dispatcher (which sets self.handler and .config).
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
# First, see if there is a custom dispatch at this URI. Custom
|
# First, see if there is a custom dispatch at this URI. Custom
|
||||||
# dispatchers can only be specified in app.config, not in _cp_config
|
# dispatchers can only be specified in app.config, not in _cp_config
|
||||||
# (since custom dispatchers may not even have an app.root).
|
# (since custom dispatchers may not even have an app.root).
|
||||||
|
@ -762,7 +792,10 @@ class Request(object):
|
||||||
dispatch(path)
|
dispatch(path)
|
||||||
|
|
||||||
def handle_error(self):
|
def handle_error(self):
|
||||||
"""Handle the last unanticipated exception. (Core)"""
|
"""Handle the last unanticipated exception.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
self.hooks.run('before_error_response')
|
self.hooks.run('before_error_response')
|
||||||
if self.error_response:
|
if self.error_response:
|
||||||
|
@ -776,7 +809,6 @@ class Request(object):
|
||||||
|
|
||||||
|
|
||||||
class ResponseBody(object):
|
class ResponseBody(object):
|
||||||
|
|
||||||
"""The body of the HTTP response (the response entity)."""
|
"""The body of the HTTP response (the response entity)."""
|
||||||
|
|
||||||
unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
|
unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
|
||||||
|
@ -802,18 +834,18 @@ class ResponseBody(object):
|
||||||
|
|
||||||
|
|
||||||
class Response(object):
|
class Response(object):
|
||||||
|
|
||||||
"""An HTTP Response, including status, headers, and body."""
|
"""An HTTP Response, including status, headers, and body."""
|
||||||
|
|
||||||
status = ''
|
status = ''
|
||||||
"""The HTTP Status-Code and Reason-Phrase."""
|
"""The HTTP Status-Code and Reason-Phrase."""
|
||||||
|
|
||||||
header_list = []
|
header_list = []
|
||||||
"""
|
"""A list of the HTTP response headers as (name, value) tuples.
|
||||||
A list of the HTTP response headers as (name, value) tuples.
|
|
||||||
In general, you should use response.headers (a dict) instead. This
|
In general, you should use response.headers (a dict) instead. This
|
||||||
attribute is generated from response.headers and is not valid until
|
attribute is generated from response.headers and is not valid until
|
||||||
after the finalize phase."""
|
after the finalize phase.
|
||||||
|
"""
|
||||||
|
|
||||||
headers = httputil.HeaderMap()
|
headers = httputil.HeaderMap()
|
||||||
"""
|
"""
|
||||||
|
@ -833,7 +865,10 @@ class Response(object):
|
||||||
"""The body (entity) of the HTTP response."""
|
"""The body (entity) of the HTTP response."""
|
||||||
|
|
||||||
time = None
|
time = None
|
||||||
"""The value of time.time() when created. Use in HTTP dates."""
|
"""The value of time.time() when created.
|
||||||
|
|
||||||
|
Use in HTTP dates.
|
||||||
|
"""
|
||||||
|
|
||||||
stream = False
|
stream = False
|
||||||
"""If False, buffer the response body."""
|
"""If False, buffer the response body."""
|
||||||
|
@ -861,15 +896,15 @@ class Response(object):
|
||||||
return new_body
|
return new_body
|
||||||
|
|
||||||
def _flush_body(self):
|
def _flush_body(self):
|
||||||
"""
|
"""Discard self.body but consume any generator such that any
|
||||||
Discard self.body but consume any generator such that
|
finalization can occur, such as is required by caching.tee_output()."""
|
||||||
any finalization can occur, such as is required by
|
|
||||||
caching.tee_output().
|
|
||||||
"""
|
|
||||||
consume(iter(self.body))
|
consume(iter(self.body))
|
||||||
|
|
||||||
def finalize(self):
|
def finalize(self):
|
||||||
"""Transform headers (and cookies) into self.header_list. (Core)"""
|
"""Transform headers (and cookies) into self.header_list.
|
||||||
|
|
||||||
|
(Core)
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
code, reason, _ = httputil.valid_status(self.status)
|
code, reason, _ = httputil.valid_status(self.status)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
|
|
|
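All of the Request/Response attributes documented in the hunks above are reachable through the thread-local cherrypy.request and cherrypy.response proxies. A minimal sketch, not taken from this diff (the Root class and the X-Echo-Method header are illustrative only):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        def index(self, **params):
            req = cherrypy.request  # the Request object described above
            cherrypy.response.headers['X-Echo-Method'] = req.method
            return '%s %s params=%r UA=%r' % (
                req.method, req.path_info, req.params,
                req.headers.get('User-Agent', ''))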
@@ -50,7 +50,8 @@ class Server(ServerAdapter):
"""If given, the name of the UNIX socket to use instead of TCP/IP.

When this option is not None, the `socket_host` and `socket_port` options
-are ignored."""
+are ignored.
+"""

socket_queue_size = 5
"""The 'backlog' argument to socket.listen(); specifies the maximum number

@@ -79,17 +80,24 @@ class Server(ServerAdapter):
"""The number of worker threads to start up in the pool."""

thread_pool_max = -1
-"""The maximum size of the worker-thread pool. Use -1 to indicate no limit.
+"""The maximum size of the worker-thread pool.
+
+Use -1 to indicate no limit.
"""

max_request_header_size = 500 * 1024
"""The maximum number of bytes allowable in the request headers.
-If exceeded, the HTTP server should return "413 Request Entity Too Large".
+
+If exceeded, the HTTP server should return "413 Request Entity Too
+Large".
"""

max_request_body_size = 100 * 1024 * 1024
-"""The maximum number of bytes allowable in the request body. If exceeded,
-the HTTP server should return "413 Request Entity Too Large"."""
+"""The maximum number of bytes allowable in the request body.
+
+If exceeded, the HTTP server should return "413 Request Entity Too
+Large".
+"""

instance = None
"""If not None, this should be an HTTP server instance (such as

@@ -119,7 +127,8 @@ class Server(ServerAdapter):
the builtin WSGI server. Builtin options are: 'builtin' (to
use the SSL library built into recent versions of Python).
You may also register your own classes in the
-cheroot.server.ssl_adapters dict."""
+cheroot.server.ssl_adapters dict.
+"""

statistics = False
"""Turns statistics-gathering on or off for aware HTTP servers."""

@@ -129,11 +138,13 @@ class Server(ServerAdapter):
wsgi_version = (1, 0)
"""The WSGI version tuple to use with the builtin WSGI server.
-The provided options are (1, 0) [which includes support for PEP 3333,
-which declares it covers WSGI version 1.0.1 but still mandates the
-wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
-You may create and register your own experimental versions of the WSGI
-protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
+
+The provided options are (1, 0) [which includes support for PEP
+3333, which declares it covers WSGI version 1.0.1 but still mandates
+the wsgi.version (1, 0)] and ('u', 0), an experimental unicode
+version. You may create and register your own experimental versions
+of the WSGI protocol by adding custom classes to the
+cheroot.server.wsgi_gateways dict.
"""

peercreds = False

@@ -184,7 +195,8 @@ class Server(ServerAdapter):
def bind_addr(self):
"""Return bind address.

-A (host, port) tuple for TCP sockets or a str for Unix domain sockts.
+A (host, port) tuple for TCP sockets or a str for Unix domain
+sockets.
"""
if self.socket_file:
return self.socket_file
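The Server adapter attributes above correspond to the usual server.* configuration keys; a minimal sketch of setting a few of them (the values are illustrative, not part of this commit):

    import cherrypy

    cherrypy.config.update({
        'server.socket_host': '0.0.0.0',
        'server.socket_port': 8080,
        'server.thread_pool': 10,
        'server.socket_queue_size': 5,
        'server.max_request_body_size': 100 * 1024 * 1024,  # 100 MB, matching the documented default
    })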
@@ -1,7 +1,7 @@
"""CherryPy tools. A "tool" is any helper, adapted to CP.

-Tools are usually designed to be used in a variety of ways (although some
-may only offer one if they choose):
+Tools are usually designed to be used in a variety of ways (although
+some may only offer one if they choose):

Library calls
All tools are callables that can be used wherever needed.

@@ -48,10 +48,10 @@ _attr_error = (
class Tool(object):
-
"""A registered function for use with CherryPy request-processing hooks.

-help(tool.callable) should give you more information about this Tool.
+help(tool.callable) should give you more information about this
+Tool.
"""

namespace = 'tools'

@@ -135,8 +135,8 @@ class Tool(object):
def _setup(self):
"""Hook this tool into cherrypy.request.

-The standard CherryPy request object will automatically call this
-method when the tool is "turned on" in config.
+The standard CherryPy request object will automatically call
+this method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)

@@ -147,15 +147,15 @@ class Tool(object):
class HandlerTool(Tool):
-
"""Tool which is called 'before main', that may skip normal handlers.

-If the tool successfully handles the request (by setting response.body),
-if should return True. This will cause CherryPy to skip any 'normal' page
-handler. If the tool did not handle the request, it should return False
-to tell CherryPy to continue on and call the normal page handler. If the
-tool is declared AS a page handler (see the 'handler' method), returning
-False will raise NotFound.
+If the tool successfully handles the request (by setting
+response.body), if should return True. This will cause CherryPy to
+skip any 'normal' page handler. If the tool did not handle the
+request, it should return False to tell CherryPy to continue on and
+call the normal page handler. If the tool is declared AS a page
+handler (see the 'handler' method), returning False will raise
+NotFound.
"""

def __init__(self, callable, name=None):

@@ -185,8 +185,8 @@ class HandlerTool(Tool):
def _setup(self):
"""Hook this tool into cherrypy.request.

-The standard CherryPy request object will automatically call this
-method when the tool is "turned on" in config.
+The standard CherryPy request object will automatically call
+this method when the tool is "turned on" in config.
"""
conf = self._merged_args()
p = conf.pop('priority', None)

@@ -197,7 +197,6 @@ class HandlerTool(Tool):
class HandlerWrapperTool(Tool):
-
"""Tool which wraps request.handler in a provided wrapper function.

The 'newhandler' arg must be a handler wrapper function that takes a

@@ -232,7 +231,6 @@ class HandlerWrapperTool(Tool):
class ErrorTool(Tool):
-
"""Tool which is used to replace the default request.error_response."""

def __init__(self, callable, name=None):

@@ -244,8 +242,8 @@ class ErrorTool(Tool):
def _setup(self):
"""Hook this tool into cherrypy.request.

-The standard CherryPy request object will automatically call this
-method when the tool is "turned on" in config.
+The standard CherryPy request object will automatically call
+this method when the tool is "turned on" in config.
"""
cherrypy.serving.request.error_response = self._wrapper

@@ -254,7 +252,6 @@ class ErrorTool(Tool):
class SessionTool(Tool):
-
"""Session Tool for CherryPy.

sessions.locking

@@ -282,8 +279,8 @@ class SessionTool(Tool):
def _setup(self):
"""Hook this tool into cherrypy.request.

-The standard CherryPy request object will automatically call this
-method when the tool is "turned on" in config.
+The standard CherryPy request object will automatically call
+this method when the tool is "turned on" in config.
"""
hooks = cherrypy.serving.request.hooks

@@ -325,7 +322,6 @@ class SessionTool(Tool):
class XMLRPCController(object):
-
"""A Controller (page handler collection) for XML-RPC.

To use it, have your controllers subclass this base class (it will

@@ -392,7 +388,6 @@ class SessionAuthTool(HandlerTool):
class CachingTool(Tool):
-
"""Caching Tool for CherryPy."""

def _wrapper(self, **kwargs):

@@ -416,11 +411,11 @@ class CachingTool(Tool):
class Toolbox(object):
-
"""A collection of Tools.

This object also functions as a config namespace handler for itself.
-Custom toolboxes should be added to each Application's toolboxes dict.
+Custom toolboxes should be added to each Application's toolboxes
+dict.
"""

def __init__(self, namespace):
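For context on the Tool._setup() plumbing above: a custom tool is just a callable attached to a hook point and registered on a toolbox, after which config can turn it on per path. A sketch with made-up names (stamp and X-Stamped are illustrative only):

    import cherrypy

    def stamp_response():
        # runs at the 'before_finalize' hook point once the tool is turned on
        cherrypy.serving.response.headers['X-Stamped'] = 'yes'

    cherrypy.tools.stamp = cherrypy.Tool('before_finalize', stamp_response)

    config = {'/': {'tools.stamp.on': True}}  # _setup() then hooks it into each request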
@@ -10,19 +10,22 @@ from cherrypy.lib import httputil, reprconf
class Application(object):
"""A CherryPy Application.

-Servers and gateways should not instantiate Request objects directly.
-Instead, they should ask an Application object for a request object.
+Servers and gateways should not instantiate Request objects
+directly. Instead, they should ask an Application object for a
+request object.

-An instance of this class may also be used as a WSGI callable
-(WSGI application object) for itself.
+An instance of this class may also be used as a WSGI callable (WSGI
+application object) for itself.
"""

root = None
-"""The top-most container of page handlers for this app. Handlers should
-be arranged in a hierarchy of attributes, matching the expected URI
-hierarchy; the default dispatcher then searches this hierarchy for a
-matching handler. When using a dispatcher other than the default,
-this value may be None."""
+"""The top-most container of page handlers for this app.
+
+Handlers should be arranged in a hierarchy of attributes, matching
+the expected URI hierarchy; the default dispatcher then searches
+this hierarchy for a matching handler. When using a dispatcher other
+than the default, this value may be None.
+"""

config = {}
"""A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict

@@ -32,10 +35,16 @@ class Application(object):
toolboxes = {'tools': cherrypy.tools}

log = None
-"""A LogManager instance. See _cplogging."""
+"""A LogManager instance.
+
+See _cplogging.
+"""

wsgiapp = None
-"""A CPWSGIApp instance. See _cpwsgi."""
+"""A CPWSGIApp instance.
+
+See _cpwsgi.
+"""

request_class = _cprequest.Request
response_class = _cprequest.Response

@@ -82,12 +91,15 @@ class Application(object):
def script_name(self): # noqa: D401; irrelevant for properties
"""The URI "mount point" for this app.

-A mount point is that portion of the URI which is constant for all URIs
-that are serviced by this application; it does not include scheme,
-host, or proxy ("virtual host") portions of the URI.
+A mount point is that portion of the URI which is constant for
+all URIs that are serviced by this application; it does not
+include scheme, host, or proxy ("virtual host") portions of the
+URI.

-For example, if script_name is "/my/cool/app", then the URL
-"http://www.example.com/my/cool/app/page1" might be handled by a
+For example, if script_name is "/my/cool/app", then the URL "
+http://www.example.com/my/cool/app/page1"
+might be handled by a
"page1" method on the root object.

The value of script_name MUST NOT end in a slash. If the script_name

@@ -171,9 +183,9 @@ class Application(object):
class Tree(object):
"""A registry of CherryPy applications, mounted at diverse points.

-An instance of this class may also be used as a WSGI callable
-(WSGI application object), in which case it dispatches to all
-mounted apps.
+An instance of this class may also be used as a WSGI callable (WSGI
+application object), in which case it dispatches to all mounted
+apps.
"""

apps = {}
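The Application/Tree docstrings above describe the normal mounting flow; a minimal sketch (the Root class and paths are illustrative):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        def page1(self):
            return 'hello'

    # script_name is the mount point and MUST NOT end in a slash
    cherrypy.tree.mount(Root(), script_name='/my/cool/app', config={'/': {}})

    # cherrypy.tree is itself a WSGI callable, so it can be handed to any WSGI server
    application = cherrypy.tree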
@@ -1,10 +1,10 @@
"""WSGI interface (see PEP 333 and 3333).

Note that WSGI environ keys and values are 'native strings'; that is,
-whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
-it's a unicode string. But PEP 3333 says: "even if Python's str type is
-actually Unicode "under the hood", the content of native strings must
-still be translatable to bytes via the Latin-1 encoding!"
+whatever the type of "" is. For Python 2, that's a byte string; for
+Python 3, it's a unicode string. But PEP 3333 says: "even if Python's
+str type is actually Unicode "under the hood", the content of native
+strings must still be translatable to bytes via the Latin-1 encoding!"
"""

import sys as _sys

@@ -34,7 +34,6 @@ def downgrade_wsgi_ux_to_1x(environ):
class VirtualHost(object):
-
"""Select a different WSGI application based on the Host header.

This can be useful when running multiple sites within one CP server.

@@ -56,7 +55,10 @@ class VirtualHost(object):
cherrypy.tree.graft(vhost)
"""
default = None
-"""Required. The default WSGI application."""
+"""Required.
+
+The default WSGI application.
+"""

use_x_forwarded_host = True
"""If True (the default), any "X-Forwarded-Host"

@@ -65,11 +67,12 @@ class VirtualHost(object):
domains = {}
"""A dict of {host header value: application} pairs.
-The incoming "Host" request header is looked up in this dict,
-and, if a match is found, the corresponding WSGI application
-will be called instead of the default. Note that you often need
-separate entries for "example.com" and "www.example.com".
-In addition, "Host" headers may contain the port number.
+
+The incoming "Host" request header is looked up in this dict, and,
+if a match is found, the corresponding WSGI application will be
+called instead of the default. Note that you often need separate
+entries for "example.com" and "www.example.com". In addition, "Host"
+headers may contain the port number.
"""

def __init__(self, default, domains=None, use_x_forwarded_host=True):

@@ -89,7 +92,6 @@ class VirtualHost(object):
class InternalRedirector(object):
-
"""WSGI middleware that handles raised cherrypy.InternalRedirect."""

def __init__(self, nextapp, recursive=False):

@@ -137,7 +139,6 @@ class InternalRedirector(object):
class ExceptionTrapper(object):
-
"""WSGI middleware that traps exceptions."""

def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):

@@ -226,7 +227,6 @@ class _TrappedResponse(object):
class AppResponse(object):
-
"""WSGI response iterable for CherryPy applications."""

def __init__(self, environ, start_response, cpapp):

@@ -277,7 +277,10 @@ class AppResponse(object):
return next(self.iter_response)

def close(self):
-"""Close and de-reference the current request and response. (Core)"""
+"""Close and de-reference the current request and response.
+
+(Core)
+"""
streaming = _cherrypy.serving.response.stream
self.cpapp.release_serving()

@@ -380,18 +383,20 @@ class AppResponse(object):
class CPWSGIApp(object):
-
"""A WSGI application object for a CherryPy Application."""

pipeline = [
('ExceptionTrapper', ExceptionTrapper),
('InternalRedirector', InternalRedirector),
]
-"""A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
-constructor that takes an initial, positional 'nextapp' argument,
-plus optional keyword arguments, and returns a WSGI application
-(that takes environ and start_response arguments). The 'name' can
-be any you choose, and will correspond to keys in self.config."""
+"""A list of (name, wsgiapp) pairs.
+
+Each 'wsgiapp' MUST be a constructor that takes an initial,
+positional 'nextapp' argument, plus optional keyword arguments, and
+returns a WSGI application (that takes environ and start_response
+arguments). The 'name' can be any you choose, and will correspond to
+keys in self.config.
+"""

head = None
"""Rather than nest all apps in the pipeline on each call, it's only

@@ -399,9 +404,12 @@ class CPWSGIApp(object):
this to None again if you change self.pipeline after calling self."""

config = {}
-"""A dict whose keys match names listed in the pipeline. Each
-value is a further dict which will be passed to the corresponding
-named WSGI callable (from the pipeline) as keyword arguments."""
+"""A dict whose keys match names listed in the pipeline.
+
+Each value is a further dict which will be passed to the
+corresponding named WSGI callable (from the pipeline) as keyword
+arguments.
+"""

response_class = AppResponse
"""The class to instantiate and return as the next app in the WSGI chain.

@@ -417,8 +425,8 @@ class CPWSGIApp(object):
def tail(self, environ, start_response):
"""WSGI application callable for the actual CherryPy application.

-You probably shouldn't call this; call self.__call__ instead,
-so that any WSGI middleware in self.pipeline can run first.
+You probably shouldn't call this; call self.__call__ instead, so
+that any WSGI middleware in self.pipeline can run first.
"""
return self.response_class(environ, start_response, self.cpapp)
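A compact sketch of the VirtualHost wiring these docstrings describe (the hostnames and the two root classes are placeholders):

    import cherrypy
    from cherrypy._cpwsgi import VirtualHost

    class MainRoot(object):
        @cherrypy.expose
        def index(self):
            return 'main site'

    class BlogRoot(object):
        @cherrypy.expose
        def index(self):
            return 'blog'

    vhost = VirtualHost(
        default=cherrypy.Application(MainRoot(), script_name=''),
        domains={'blog.example.com': cherrypy.Application(BlogRoot(), script_name='')},
    )
    cherrypy.tree.graft(vhost)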
@@ -1,7 +1,7 @@
-"""
-WSGI server interface (see PEP 333).
+"""WSGI server interface (see PEP 333).

-This adds some CP-specific bits to the framework-agnostic cheroot package.
+This adds some CP-specific bits to the framework-agnostic cheroot
+package.
"""
import sys

@@ -35,10 +35,11 @@ class CPWSGIHTTPRequest(cheroot.server.HTTPRequest):
class CPWSGIServer(cheroot.wsgi.Server):
"""Wrapper for cheroot.wsgi.Server.

-cheroot has been designed to not reference CherryPy in any way,
-so that it can be used in other frameworks and applications. Therefore,
-we wrap it here, so we can set our own mount points from cherrypy.tree
-and apply some attributes from config -> cherrypy.server -> wsgi.Server.
+cheroot has been designed to not reference CherryPy in any way, so
+that it can be used in other frameworks and applications. Therefore,
+we wrap it here, so we can set our own mount points from
+cherrypy.tree and apply some attributes from config ->
+cherrypy.server -> wsgi.Server.
"""

fmt = 'CherryPy/{cherrypy.__version__} {cheroot.wsgi.Server.version}'
@@ -137,7 +137,6 @@ def popargs(*args, **kwargs):
class Root:
def index(self):
#...
-
"""
# Since keyword arg comes after *args, we have to process it ourselves
# for lower versions of python.

@@ -201,16 +200,17 @@ def url(path='', qs='', script_name=None, base=None, relative=None):
If it does not start with a slash, this returns
(base + script_name [+ request.path_info] + path + qs).

-If script_name is None, cherrypy.request will be used
-to find a script_name, if available.
+If script_name is None, cherrypy.request will be used to find a
+script_name, if available.

If base is None, cherrypy.request.base will be used (if available).
Note that you can use cherrypy.tools.proxy to change this.

-Finally, note that this function can be used to obtain an absolute URL
-for the current request path (minus the querystring) by passing no args.
-If you call url(qs=cherrypy.request.query_string), you should get the
-original browser URL (assuming no internal redirections).
+Finally, note that this function can be used to obtain an absolute
+URL for the current request path (minus the querystring) by passing
+no args. If you call url(qs=cherrypy.request.query_string), you
+should get the original browser URL (assuming no internal
+redirections).

If relative is None or not provided, request.app.relative_urls will
be used (if available, else False). If False, the output will be an

@@ -320,8 +320,8 @@ def normalize_path(path):
class _ClassPropertyDescriptor(object):
"""Descript for read-only class-based property.

-Turns a classmethod-decorated func into a read-only property of that class
-type (means the value cannot be set).
+Turns a classmethod-decorated func into a read-only property of that
+class type (means the value cannot be set).
"""

def __init__(self, fget, fset=None):
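A quick sketch of the cherrypy.url() behaviour documented in that hunk, as called from inside a page handler (paths are illustrative):

    import cherrypy

    class Root(object):
        @cherrypy.expose
        def index(self):
            here = cherrypy.url()                    # current path, minus the querystring
            original = cherrypy.url(qs=cherrypy.request.query_string)
            other = cherrypy.url('page1', qs='a=1')  # resolved relative to the current path
            return '\n'.join((here, original, other))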
@@ -1,5 +1,4 @@
-"""
-JSON support.
+"""JSON support.

Expose preferred json module as json and provide encode/decode
convenience functions.
@@ -6,8 +6,8 @@ def is_iterator(obj):
(i.e. like a generator).

-This will return False for objects which are iterable,
-but not iterators themselves.
+This will return False for objects which are iterable, but not
+iterators themselves.
"""
from types import GeneratorType
if isinstance(obj, GeneratorType):
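The distinction that docstring draws, in a short sketch (assuming the helper is exposed as cherrypy.lib.is_iterator):

    from cherrypy.lib import is_iterator

    is_iterator([1, 2, 3])                 # False: iterable, but not an iterator
    is_iterator(iter([1, 2, 3]))           # True
    is_iterator(x * 2 for x in range(3))   # True: generators behave like iterators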
@@ -18,7 +18,6 @@ as the credentials store::
'tools.auth_basic.accept_charset': 'UTF-8',
}
app_config = { '/' : basic_auth }
-
"""

import binascii
@@ -55,7 +55,7 @@ def TRACE(msg):
def get_ha1_dict_plain(user_password_dict):
-"""Returns a get_ha1 function which obtains a plaintext password from a
+"""Return a get_ha1 function which obtains a plaintext password from a
dictionary of the form: {username : password}.

If you want a simple dictionary-based authentication scheme, with plaintext

@@ -72,7 +72,7 @@ def get_ha1_dict_plain(user_password_dict):
def get_ha1_dict(user_ha1_dict):
-"""Returns a get_ha1 function which obtains a HA1 password hash from a
+"""Return a get_ha1 function which obtains a HA1 password hash from a
dictionary of the form: {username : HA1}.

If you want a dictionary-based authentication scheme, but with

@@ -87,7 +87,7 @@ def get_ha1_dict(user_ha1_dict):
def get_ha1_file_htdigest(filename):
-"""Returns a get_ha1 function which obtains a HA1 password hash from a
+"""Return a get_ha1 function which obtains a HA1 password hash from a
flat file with lines of the same format as that produced by the Apache
htdigest utility. For example, for realm 'wonderland', username 'alice',
and password '4x5istwelve', the htdigest line would be::

@@ -135,7 +135,7 @@ def synthesize_nonce(s, key, timestamp=None):
def H(s):
-"""The hash function H"""
+"""The hash function H."""
return md5_hex(s)

@@ -259,10 +259,11 @@ class HttpDigestAuthorization(object):
return False

def is_nonce_stale(self, max_age_seconds=600):
-"""Returns True if a validated nonce is stale. The nonce contains a
-timestamp in plaintext and also a secure hash of the timestamp.
-You should first validate the nonce to ensure the plaintext
-timestamp is not spoofed.
+"""Return True if a validated nonce is stale.
+
+The nonce contains a timestamp in plaintext and also a secure
+hash of the timestamp. You should first validate the nonce to
+ensure the plaintext timestamp is not spoofed.
"""
try:
timestamp, hashpart = self.nonce.split(':', 1)

@@ -275,7 +276,10 @@ class HttpDigestAuthorization(object):
return True

def HA2(self, entity_body=''):
-"""Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
+"""Return the H(A2) string.
+
+See :rfc:`2617` section 3.2.2.3.
+"""
# RFC 2617 3.2.2.3
# If the "qop" directive's value is "auth" or is unspecified,
# then A2 is:

@@ -306,7 +310,6 @@ class HttpDigestAuthorization(object):
4.3. This refers to the entity the user agent sent in the
request which has the Authorization header. Typically GET
requests don't have an entity, and POST requests do.
-
"""
ha2 = self.HA2(entity_body)
# Request-Digest -- RFC 2617 3.2.2.1

@@ -395,7 +398,6 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
key
A secret string known only to the server, used in the synthesis
of nonces.
-
"""
request = cherrypy.serving.request

@@ -447,9 +449,7 @@ def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
def _respond_401(realm, key, accept_charset, debug, **kwargs):
-"""
-Respond with 401 status and a WWW-Authenticate header
-"""
+"""Respond with 401 status and a WWW-Authenticate header."""
header = www_authenticate(
realm, key,
accept_charset=accept_charset,
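The digest_auth tool and the get_ha1_* helpers above are normally wired up through config; a sketch along the lines of the upstream docs (realm, username, password and key are placeholder values):

    import cherrypy
    from cherrypy.lib import auth_digest

    users = {'alice': '4x5istwelve'}  # plaintext store, for illustration only

    config = {
        '/protected': {
            'tools.auth_digest.on': True,
            'tools.auth_digest.realm': 'wonderland',
            'tools.auth_digest.get_ha1': auth_digest.get_ha1_dict_plain(users),
            'tools.auth_digest.key': 'a565c27146791cfb',  # secret used to synthesize nonces
            'tools.auth_digest.accept_charset': 'UTF-8',
        }
    }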
@ -42,7 +42,6 @@ from cherrypy.lib import cptools, httputil
|
||||||
|
|
||||||
|
|
||||||
class Cache(object):
|
class Cache(object):
|
||||||
|
|
||||||
"""Base class for Cache implementations."""
|
"""Base class for Cache implementations."""
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
|
@ -64,17 +63,16 @@ class Cache(object):
|
||||||
|
|
||||||
# ------------------------------ Memory Cache ------------------------------- #
|
# ------------------------------ Memory Cache ------------------------------- #
|
||||||
class AntiStampedeCache(dict):
|
class AntiStampedeCache(dict):
|
||||||
|
|
||||||
"""A storage system for cached items which reduces stampede collisions."""
|
"""A storage system for cached items which reduces stampede collisions."""
|
||||||
|
|
||||||
def wait(self, key, timeout=5, debug=False):
|
def wait(self, key, timeout=5, debug=False):
|
||||||
"""Return the cached value for the given key, or None.
|
"""Return the cached value for the given key, or None.
|
||||||
|
|
||||||
If timeout is not None, and the value is already
|
If timeout is not None, and the value is already being
|
||||||
being calculated by another thread, wait until the given timeout has
|
calculated by another thread, wait until the given timeout has
|
||||||
elapsed. If the value is available before the timeout expires, it is
|
elapsed. If the value is available before the timeout expires,
|
||||||
returned. If not, None is returned, and a sentinel placed in the cache
|
it is returned. If not, None is returned, and a sentinel placed
|
||||||
to signal other threads to wait.
|
in the cache to signal other threads to wait.
|
||||||
|
|
||||||
If timeout is None, no waiting is performed nor sentinels used.
|
If timeout is None, no waiting is performed nor sentinels used.
|
||||||
"""
|
"""
|
||||||
|
@ -127,7 +125,6 @@ class AntiStampedeCache(dict):
|
||||||
|
|
||||||
|
|
||||||
class MemoryCache(Cache):
|
class MemoryCache(Cache):
|
||||||
|
|
||||||
"""An in-memory cache for varying response content.
|
"""An in-memory cache for varying response content.
|
||||||
|
|
||||||
Each key in self.store is a URI, and each value is an AntiStampedeCache.
|
Each key in self.store is a URI, and each value is an AntiStampedeCache.
|
||||||
|
@ -381,7 +378,10 @@ def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
|
||||||
|
|
||||||
|
|
||||||
def tee_output():
|
def tee_output():
|
||||||
"""Tee response output to cache storage. Internal."""
|
"""Tee response output to cache storage.
|
||||||
|
|
||||||
|
Internal.
|
||||||
|
"""
|
||||||
# Used by CachingTool by attaching to request.hooks
|
# Used by CachingTool by attaching to request.hooks
|
||||||
|
|
||||||
request = cherrypy.serving.request
|
request = cherrypy.serving.request
|
||||||
|
@ -441,7 +441,6 @@ def expires(secs=0, force=False, debug=False):
     * Expires

     If any are already present, none of the above response headers are set.
-
     """

     response = cherrypy.serving.response
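Both tools touched above are normally switched on through application config. A minimal sketch, with illustrative path, delay and secs values:

    import cherrypy

    class Root:
        @cherrypy.expose
        def index(self):
            return 'expensive page, safe to cache'

    config = {
        '/': {
            'tools.caching.on': True,      # serve repeat hits from MemoryCache
            'tools.caching.delay': 300,    # freshness lifetime, in seconds
            'tools.expires.on': True,
            'tools.expires.secs': 300,     # also tell clients/proxies to cache
        },
    }

    if __name__ == '__main__':
        cherrypy.quickstart(Root(), '/', config)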
@ -22,7 +22,7 @@ it will call ``serve()`` for you.

 import re
 import sys
-import cgi
+import html
 import os
 import os.path
 import urllib.parse
@ -352,9 +352,9 @@ class CoverStats(object):
                 buffer.append((lineno, line))
             if empty_the_buffer:
                 for lno, pastline in buffer:
-                    yield template % (lno, cgi.escape(pastline))
+                    yield template % (lno, html.escape(pastline))
                 buffer = []
-            yield template % (lineno, cgi.escape(line))
+            yield template % (lineno, html.escape(line))

     @cherrypy.expose
     def report(self, name):
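The cgi module (removed from the standard library in Python 3.13) lost cgi.escape back in Python 3.8; html.escape is the drop-in replacement. One difference worth noting: html.escape also escapes quote characters by default, which is harmless for the coverage report above.

    import html

    line = 'if x < 3 and name == "joe" & co:'

    # quote=True is the default; quote=False matches the old cgi.escape
    # behaviour of leaving quote characters alone.
    print(html.escape(line))
    print(html.escape(line, quote=False))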
@ -184,7 +184,6 @@ To report statistics::
|
||||||
To format statistics reports::
|
To format statistics reports::
|
||||||
|
|
||||||
See 'Reporting', above.
|
See 'Reporting', above.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
@ -254,7 +253,6 @@ def proc_time(s):
|
||||||
|
|
||||||
|
|
||||||
class ByteCountWrapper(object):
|
class ByteCountWrapper(object):
|
||||||
|
|
||||||
"""Wraps a file-like object, counting the number of bytes read."""
|
"""Wraps a file-like object, counting the number of bytes read."""
|
||||||
|
|
||||||
def __init__(self, rfile):
|
def __init__(self, rfile):
|
||||||
|
@ -307,7 +305,6 @@ def _get_threading_ident():
|
||||||
|
|
||||||
|
|
||||||
class StatsTool(cherrypy.Tool):
|
class StatsTool(cherrypy.Tool):
|
||||||
|
|
||||||
"""Record various information about the current request."""
|
"""Record various information about the current request."""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
@ -316,8 +313,8 @@ class StatsTool(cherrypy.Tool):
|
||||||
def _setup(self):
|
def _setup(self):
|
||||||
"""Hook this tool into cherrypy.request.
|
"""Hook this tool into cherrypy.request.
|
||||||
|
|
||||||
The standard CherryPy request object will automatically call this
|
The standard CherryPy request object will automatically call
|
||||||
method when the tool is "turned on" in config.
|
this method when the tool is "turned on" in config.
|
||||||
"""
|
"""
|
||||||
if appstats.get('Enabled', False):
|
if appstats.get('Enabled', False):
|
||||||
cherrypy.Tool._setup(self)
|
cherrypy.Tool._setup(self)
|
||||||
|
|
|
@ -94,8 +94,8 @@ def validate_etags(autotags=False, debug=False):
|
||||||
def validate_since():
|
def validate_since():
|
||||||
"""Validate the current Last-Modified against If-Modified-Since headers.
|
"""Validate the current Last-Modified against If-Modified-Since headers.
|
||||||
|
|
||||||
If no code has set the Last-Modified response header, then no validation
|
If no code has set the Last-Modified response header, then no
|
||||||
will be performed.
|
validation will be performed.
|
||||||
"""
|
"""
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
lastmod = response.headers.get('Last-Modified')
|
lastmod = response.headers.get('Last-Modified')
|
||||||
|
@ -123,9 +123,9 @@ def validate_since():
 def allow(methods=None, debug=False):
     """Raise 405 if request.method not in methods (default ['GET', 'HEAD']).

-    The given methods are case-insensitive, and may be in any order.
-    If only one method is allowed, you may supply a single string;
-    if more than one, supply a list of strings.
+    The given methods are case-insensitive, and may be in any order. If
+    only one method is allowed, you may supply a single string; if more
+    than one, supply a list of strings.

     Regardless of whether the current method is allowed or not, this
     also emits an 'Allow' response header, containing the given methods.
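As a usage sketch (the handler class and method lists are illustrative), the tool is most often applied per handler:

    import cherrypy

    class Widget:
        @cherrypy.expose
        @cherrypy.tools.allow(methods=['GET', 'HEAD'])
        def index(self):
            return 'read-only resource'

        @cherrypy.expose
        @cherrypy.tools.allow(methods='POST')   # a single string works too
        def submit(self, **kwargs):
            return 'accepted'

A request with any other verb gets a 405 response, and every response carries an Allow header listing the permitted methods.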
@ -154,22 +154,23 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
           scheme='X-Forwarded-Proto', debug=False):
     """Change the base URL (scheme://host[:port][/path]).

-    For running a CP server behind Apache, lighttpd, or other HTTP server.
+    For running a CP server behind Apache, lighttpd, or other HTTP
+    server.

-    For Apache and lighttpd, you should leave the 'local' argument at the
-    default value of 'X-Forwarded-Host'. For Squid, you probably want to set
-    tools.proxy.local = 'Origin'.
+    For Apache and lighttpd, you should leave the 'local' argument at
+    the default value of 'X-Forwarded-Host'. For Squid, you probably
+    want to set tools.proxy.local = 'Origin'.

-    If you want the new request.base to include path info (not just the host),
-    you must explicitly set base to the full base path, and ALSO set 'local'
-    to '', so that the X-Forwarded-Host request header (which never includes
-    path info) does not override it. Regardless, the value for 'base' MUST
-    NOT end in a slash.
+    If you want the new request.base to include path info (not just the
+    host), you must explicitly set base to the full base path, and ALSO
+    set 'local' to '', so that the X-Forwarded-Host request header
+    (which never includes path info) does not override it. Regardless,
+    the value for 'base' MUST NOT end in a slash.

     cherrypy.request.remote.ip (the IP address of the client) will be
-    rewritten if the header specified by the 'remote' arg is valid.
-    By default, 'remote' is set to 'X-Forwarded-For'. If you do not
-    want to rewrite remote.ip, set the 'remote' arg to an empty string.
+    rewritten if the header specified by the 'remote' arg is valid. By
+    default, 'remote' is set to 'X-Forwarded-For'. If you do not want to
+    rewrite remote.ip, set the 'remote' arg to an empty string.
     """

     request = cherrypy.serving.request
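A minimal config sketch for a CherryPy app sitting behind a reverse proxy; the header names and base URL are illustrative and must match what the front-end actually sends:

    config = {
        '/': {
            'tools.proxy.on': True,
            'tools.proxy.local': 'X-Forwarded-Host',    # Apache/lighttpd default
            'tools.proxy.remote': 'X-Forwarded-For',    # rewrites request.remote.ip
            'tools.proxy.scheme': 'X-Forwarded-Proto',
            # Behind Squid, or when path info must be preserved:
            # 'tools.proxy.base': 'https://example.com/app',  # never ends in '/'
            # 'tools.proxy.local': '',
        },
    }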
@ -217,8 +218,8 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||||
def ignore_headers(headers=('Range',), debug=False):
|
def ignore_headers(headers=('Range',), debug=False):
|
||||||
"""Delete request headers whose field names are included in 'headers'.
|
"""Delete request headers whose field names are included in 'headers'.
|
||||||
|
|
||||||
This is a useful tool for working behind certain HTTP servers;
|
This is a useful tool for working behind certain HTTP servers; for
|
||||||
for example, Apache duplicates the work that CP does for 'Range'
|
example, Apache duplicates the work that CP does for 'Range'
|
||||||
headers, and will doubly-truncate the response.
|
headers, and will doubly-truncate the response.
|
||||||
"""
|
"""
|
||||||
request = cherrypy.serving.request
|
request = cherrypy.serving.request
|
||||||
|
@ -281,7 +282,6 @@ def referer(pattern, accept=True, accept_missing=False, error=403,
|
||||||
|
|
||||||
|
|
||||||
class SessionAuth(object):
|
class SessionAuth(object):
|
||||||
|
|
||||||
"""Assert that the user is logged in."""
|
"""Assert that the user is logged in."""
|
||||||
|
|
||||||
session_key = 'username'
|
session_key = 'username'
|
||||||
|
@ -319,7 +319,10 @@ Message: %(error_msg)s
|
||||||
</body></html>""") % vars()).encode('utf-8')
|
</body></html>""") % vars()).encode('utf-8')
|
||||||
|
|
||||||
def do_login(self, username, password, from_page='..', **kwargs):
|
def do_login(self, username, password, from_page='..', **kwargs):
|
||||||
"""Login. May raise redirect, or return True if request handled."""
|
"""Login.
|
||||||
|
|
||||||
|
May raise redirect, or return True if request handled.
|
||||||
|
"""
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
error_msg = self.check_username_and_password(username, password)
|
error_msg = self.check_username_and_password(username, password)
|
||||||
if error_msg:
|
if error_msg:
|
||||||
|
@ -336,7 +339,10 @@ Message: %(error_msg)s
|
||||||
raise cherrypy.HTTPRedirect(from_page or '/')
|
raise cherrypy.HTTPRedirect(from_page or '/')
|
||||||
|
|
||||||
def do_logout(self, from_page='..', **kwargs):
|
def do_logout(self, from_page='..', **kwargs):
|
||||||
"""Logout. May raise redirect, or return True if request handled."""
|
"""Logout.
|
||||||
|
|
||||||
|
May raise redirect, or return True if request handled.
|
||||||
|
"""
|
||||||
sess = cherrypy.session
|
sess = cherrypy.session
|
||||||
username = sess.get(self.session_key)
|
username = sess.get(self.session_key)
|
||||||
sess[self.session_key] = None
|
sess[self.session_key] = None
|
||||||
|
@ -346,7 +352,9 @@ Message: %(error_msg)s
|
||||||
raise cherrypy.HTTPRedirect(from_page)
|
raise cherrypy.HTTPRedirect(from_page)
|
||||||
|
|
||||||
def do_check(self):
|
def do_check(self):
|
||||||
"""Assert username. Raise redirect, or return True if request handled.
|
"""Assert username.
|
||||||
|
|
||||||
|
Raise redirect, or return True if request handled.
|
||||||
"""
|
"""
|
||||||
sess = cherrypy.session
|
sess = cherrypy.session
|
||||||
request = cherrypy.serving.request
|
request = cherrypy.serving.request
|
||||||
|
@ -408,8 +416,7 @@ def session_auth(**kwargs):
|
||||||
|
|
||||||
Any attribute of the SessionAuth class may be overridden
|
Any attribute of the SessionAuth class may be overridden
|
||||||
via a keyword arg to this function:
|
via a keyword arg to this function:
|
||||||
|
""" + '\n' + '\n '.join(
|
||||||
""" + '\n '.join(
|
|
||||||
'{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
|
'{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
|
||||||
for k in dir(SessionAuth)
|
for k in dir(SessionAuth)
|
||||||
if not k.startswith('__')
|
if not k.startswith('__')
|
||||||
|
@ -490,8 +497,8 @@ def trailing_slash(missing=True, extra=False, status=None, debug=False):
|
||||||
def flatten(debug=False):
|
def flatten(debug=False):
|
||||||
"""Wrap response.body in a generator that recursively iterates over body.
|
"""Wrap response.body in a generator that recursively iterates over body.
|
||||||
|
|
||||||
This allows cherrypy.response.body to consist of 'nested generators';
|
This allows cherrypy.response.body to consist of 'nested
|
||||||
that is, a set of generators that yield generators.
|
generators'; that is, a set of generators that yield generators.
|
||||||
"""
|
"""
|
||||||
def flattener(input):
|
def flattener(input):
|
||||||
numchunks = 0
|
numchunks = 0
|
||||||
|
|
|
@ -261,9 +261,7 @@ class ResponseEncoder:
|
||||||
|
|
||||||
|
|
||||||
def prepare_iter(value):
|
def prepare_iter(value):
|
||||||
"""
|
"""Ensure response body is iterable and resolves to False when empty."""
|
||||||
Ensure response body is iterable and resolves to False when empty.
|
|
||||||
"""
|
|
||||||
if isinstance(value, text_or_bytes):
|
if isinstance(value, text_or_bytes):
|
||||||
# strings get wrapped in a list because iterating over a single
|
# strings get wrapped in a list because iterating over a single
|
||||||
# item list is much faster than iterating over every character
|
# item list is much faster than iterating over every character
|
||||||
|
@ -360,7 +358,6 @@ def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||||
* No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
|
* No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
|
||||||
* No 'gzip' or 'x-gzip' with a qvalue > 0 is present
|
* No 'gzip' or 'x-gzip' with a qvalue > 0 is present
|
||||||
* The 'identity' value is given with a qvalue > 0.
|
* The 'identity' value is given with a qvalue > 0.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
request = cherrypy.serving.request
|
request = cherrypy.serving.request
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
|
|
|
@ -14,7 +14,6 @@ from cherrypy.process.plugins import SimplePlugin
|
||||||
|
|
||||||
|
|
||||||
class ReferrerTree(object):
|
class ReferrerTree(object):
|
||||||
|
|
||||||
"""An object which gathers all referrers of an object to a given depth."""
|
"""An object which gathers all referrers of an object to a given depth."""
|
||||||
|
|
||||||
peek_length = 40
|
peek_length = 40
|
||||||
|
@ -132,7 +131,6 @@ def get_context(obj):
|
||||||
|
|
||||||
|
|
||||||
class GCRoot(object):
|
class GCRoot(object):
|
||||||
|
|
||||||
"""A CherryPy page handler for testing reference leaks."""
|
"""A CherryPy page handler for testing reference leaks."""
|
||||||
|
|
||||||
classes = [
|
classes = [
|
||||||
lib/cherrypy/lib/headers.py (new file, 39 lines)
@ -0,0 +1,39 @@
+"""headers."""
+
+
+def _parse_param(s):
+    while s[:1] == ';':
+        s = s[1:]
+        end = s.find(';')
+        while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
+            end = s.find(';', end + 1)
+        if end < 0:
+            end = len(s)
+        f = s[:end]
+        yield f.strip()
+        s = s[end:]
+
+
+def parse_header(line):
+    """Parse a Content-type like header.
+
+    Return the main content-type and a dictionary of options.
+
+    Copied from removed stdlib cgi module. See
+    `cherrypy/cherrypy#2014 (comment)
+    <https://github.com/cherrypy/cherrypy/issues/2014#issuecomment-1883774891>`_
+    for background.
+    """
+    parts = _parse_param(';' + line)
+    key = parts.__next__()
+    pdict = {}
+    for p in parts:
+        i = p.find('=')
+        if i >= 0:
+            name = p[:i].strip().lower()
+            value = p[i + 1:].strip()
+            if len(value) >= 2 and value[0] == value[-1] == '"':
+                value = value[1:-1]
+                value = value.replace('\\\\', '\\').replace('\\"', '"')
+            pdict[name] = value
+    return key, pdict
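The vendored helper behaves like the old cgi.parse_header. A quick, self-contained check (the header value below is just an example):

    from cherrypy.lib.headers import parse_header

    ctype, params = parse_header('multipart/form-data; boundary="abc"; charset=utf-8')
    assert ctype == 'multipart/form-data'
    assert params == {'boundary': 'abc', 'charset': 'utf-8'}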
@ -12,7 +12,6 @@ import email.utils
 import re
 import builtins
 from binascii import b2a_base64
-from cgi import parse_header
 from email.header import decode_header
 from http.server import BaseHTTPRequestHandler
 from urllib.parse import unquote_plus
@ -21,6 +20,7 @@ import jaraco.collections

 import cherrypy
 from cherrypy._cpcompat import ntob, ntou
+from .headers import parse_header

 response_codes = BaseHTTPRequestHandler.responses.copy()

@ -71,10 +71,10 @@ def protocol_from_http(protocol_str):
 def get_ranges(headervalue, content_length):
     """Return a list of (start, stop) indices from a Range header, or None.

-    Each (start, stop) tuple will be composed of two ints, which are suitable
-    for use in a slicing operation. That is, the header "Range: bytes=3-6",
-    if applied against a Python string, is requesting resource[3:7]. This
-    function will return the list [(3, 7)].
+    Each (start, stop) tuple will be composed of two ints, which are
+    suitable for use in a slicing operation. That is, the header "Range:
+    bytes=3-6", if applied against a Python string, is requesting
+    resource[3:7]. This function will return the list [(3, 7)].

     If this function returns an empty list, you should return HTTP 416.
     """
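The docstring's example, written out as a runnable check (the header value and resource length are illustrative):

    from cherrypy.lib.httputil import get_ranges

    resource = b'0123456789'
    ranges = get_ranges('bytes=3-6', len(resource))
    assert ranges == [(3, 7)]
    assert resource[3:7] == b'3456'

    # An unsatisfiable range comes back as an empty list -> respond with 416.
    assert get_ranges('bytes=99-', len(resource)) == []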
@ -127,7 +127,6 @@ def get_ranges(headervalue, content_length):
|
||||||
|
|
||||||
|
|
||||||
class HeaderElement(object):
|
class HeaderElement(object):
|
||||||
|
|
||||||
"""An element (with parameters) from an HTTP header's element list."""
|
"""An element (with parameters) from an HTTP header's element list."""
|
||||||
|
|
||||||
def __init__(self, value, params=None):
|
def __init__(self, value, params=None):
|
||||||
|
@ -169,14 +168,14 @@ q_separator = re.compile(r'; *q *=')
|
||||||
|
|
||||||
|
|
||||||
class AcceptElement(HeaderElement):
|
class AcceptElement(HeaderElement):
|
||||||
|
|
||||||
"""An element (with parameters) from an Accept* header's element list.
|
"""An element (with parameters) from an Accept* header's element list.
|
||||||
|
|
||||||
AcceptElement objects are comparable; the more-preferred object will be
|
AcceptElement objects are comparable; the more-preferred object will
|
||||||
"less than" the less-preferred object. They are also therefore sortable;
|
be "less than" the less-preferred object. They are also therefore
|
||||||
if you sort a list of AcceptElement objects, they will be listed in
|
sortable; if you sort a list of AcceptElement objects, they will be
|
||||||
priority order; the most preferred value will be first. Yes, it should
|
listed in priority order; the most preferred value will be first.
|
||||||
have been the other way around, but it's too late to fix now.
|
Yes, it should have been the other way around, but it's too late to
|
||||||
|
fix now.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
@ -249,8 +248,7 @@ def header_elements(fieldname, fieldvalue):
|
||||||
|
|
||||||
|
|
||||||
def decode_TEXT(value):
|
def decode_TEXT(value):
|
||||||
r"""
|
r"""Decode :rfc:`2047` TEXT.
|
||||||
Decode :rfc:`2047` TEXT
|
|
||||||
|
|
||||||
>>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
|
>>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
|
||||||
True
|
True
|
||||||
|
@ -265,9 +263,7 @@ def decode_TEXT(value):
|
||||||
|
|
||||||
|
|
||||||
def decode_TEXT_maybe(value):
|
def decode_TEXT_maybe(value):
|
||||||
"""
|
"""Decode the text but only if '=?' appears in it."""
|
||||||
Decode the text but only if '=?' appears in it.
|
|
||||||
"""
|
|
||||||
return decode_TEXT(value) if '=?' in value else value
|
return decode_TEXT(value) if '=?' in value else value
|
||||||
|
|
||||||
|
|
||||||
|
@ -388,7 +384,6 @@ def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
|
||||||
|
|
||||||
|
|
||||||
class CaseInsensitiveDict(jaraco.collections.KeyTransformingDict):
|
class CaseInsensitiveDict(jaraco.collections.KeyTransformingDict):
|
||||||
|
|
||||||
"""A case-insensitive dict subclass.
|
"""A case-insensitive dict subclass.
|
||||||
|
|
||||||
Each key is changed on entry to title case.
|
Each key is changed on entry to title case.
|
||||||
|
@ -417,7 +412,6 @@ else:
|
||||||
|
|
||||||
|
|
||||||
class HeaderMap(CaseInsensitiveDict):
|
class HeaderMap(CaseInsensitiveDict):
|
||||||
|
|
||||||
"""A dict subclass for HTTP request and response headers.
|
"""A dict subclass for HTTP request and response headers.
|
||||||
|
|
||||||
Each key is changed on entry to str(key).title(). This allows headers
|
Each key is changed on entry to str(key).title(). This allows headers
|
||||||
|
@ -494,7 +488,6 @@ class HeaderMap(CaseInsensitiveDict):
|
||||||
|
|
||||||
|
|
||||||
class Host(object):
|
class Host(object):
|
||||||
|
|
||||||
"""An internet address.
|
"""An internet address.
|
||||||
|
|
||||||
name
|
name
|
||||||
|
|
|
@ -7,22 +7,22 @@ class NeverExpires(object):


 class Timer(object):
-    """
-    A simple timer that will indicate when an expiration time has passed
-    """
+    """A simple timer that will indicate when an expiration time has passed."""
     def __init__(self, expiration):
         'Create a timer that expires at `expiration` (UTC datetime)'
         self.expiration = expiration

     @classmethod
     def after(cls, elapsed):
-        """
-        Return a timer that will expire after `elapsed` passes.
-        """
-        return cls(datetime.datetime.utcnow() + elapsed)
+        """Return a timer that will expire after `elapsed` passes."""
+        return cls(
+            datetime.datetime.now(datetime.timezone.utc) + elapsed,
+        )

     def expired(self):
-        return datetime.datetime.utcnow() >= self.expiration
+        return datetime.datetime.now(
+            datetime.timezone.utc,
+        ) >= self.expiration


 class LockTimeout(Exception):
@ -30,9 +30,7 @@ class LockTimeout(Exception):


 class LockChecker(object):
-    """
-    Keep track of the time and detect if a timeout has expired
-    """
+    """Keep track of the time and detect if a timeout has expired."""
     def __init__(self, session_id, timeout):
         self.session_id = session_id
         if timeout:
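The substantive change here is the move from the deprecated, naive datetime.datetime.utcnow() to the timezone-aware datetime.datetime.now(datetime.timezone.utc). A small sketch of the same pattern outside CherryPy (the 30-second lifetime is illustrative):

    import datetime

    def deadline(seconds):
        """Return an aware UTC datetime `seconds` from now."""
        return datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
            seconds=seconds,
        )

    def expired(when):
        # Comparing two aware datetimes avoids the naive/aware TypeError that
        # mixing utcnow() with aware values would eventually raise.
        return datetime.datetime.now(datetime.timezone.utc) >= when

    d = deadline(30)
    print(expired(d))   # False until roughly 30 seconds have passed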
|
@ -30,7 +30,6 @@ to get a quick sanity-check on overall CP performance. Use the
|
||||||
``--profile`` flag when running the test suite. Then, use the ``serve()``
|
``--profile`` flag when running the test suite. Then, use the ``serve()``
|
||||||
function to browse the results in a web browser. If you run this
|
function to browse the results in a web browser. If you run this
|
||||||
module from the command line, it will call ``serve()`` for you.
|
module from the command line, it will call ``serve()`` for you.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import io
|
import io
|
||||||
|
|
|
@ -27,18 +27,17 @@ from cherrypy._cpcompat import text_or_bytes
|
||||||
|
|
||||||
|
|
||||||
class NamespaceSet(dict):
|
class NamespaceSet(dict):
|
||||||
|
|
||||||
"""A dict of config namespace names and handlers.
|
"""A dict of config namespace names and handlers.
|
||||||
|
|
||||||
Each config entry should begin with a namespace name; the corresponding
|
Each config entry should begin with a namespace name; the
|
||||||
namespace handler will be called once for each config entry in that
|
corresponding namespace handler will be called once for each config
|
||||||
namespace, and will be passed two arguments: the config key (with the
|
entry in that namespace, and will be passed two arguments: the
|
||||||
namespace removed) and the config value.
|
config key (with the namespace removed) and the config value.
|
||||||
|
|
||||||
Namespace handlers may be any Python callable; they may also be
|
Namespace handlers may be any Python callable; they may also be
|
||||||
context managers, in which case their __enter__
|
context managers, in which case their __enter__ method should return
|
||||||
method should return a callable to be used as the handler.
|
a callable to be used as the handler. See cherrypy.tools (the
|
||||||
See cherrypy.tools (the Toolbox class) for an example.
|
Toolbox class) for an example.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __call__(self, config):
|
def __call__(self, config):
|
||||||
|
@ -48,9 +47,10 @@ class NamespaceSet(dict):
|
||||||
A flat dict, where keys use dots to separate
|
A flat dict, where keys use dots to separate
|
||||||
namespaces, and values are arbitrary.
|
namespaces, and values are arbitrary.
|
||||||
|
|
||||||
The first name in each config key is used to look up the corresponding
|
The first name in each config key is used to look up the
|
||||||
namespace handler. For example, a config entry of {'tools.gzip.on': v}
|
corresponding namespace handler. For example, a config entry of
|
||||||
will call the 'tools' namespace handler with the args: ('gzip.on', v)
|
{'tools.gzip.on': v} will call the 'tools' namespace handler
|
||||||
|
with the args: ('gzip.on', v)
|
||||||
"""
|
"""
|
||||||
# Separate the given config into namespaces
|
# Separate the given config into namespaces
|
||||||
ns_confs = {}
|
ns_confs = {}
|
||||||
|
@ -103,7 +103,6 @@ class NamespaceSet(dict):
|
||||||
|
|
||||||
|
|
||||||
class Config(dict):
|
class Config(dict):
|
||||||
|
|
||||||
"""A dict-like set of configuration data, with defaults and namespaces.
|
"""A dict-like set of configuration data, with defaults and namespaces.
|
||||||
|
|
||||||
May take a file, filename, or dict.
|
May take a file, filename, or dict.
|
||||||
|
@ -167,7 +166,7 @@ class Parser(configparser.ConfigParser):
|
||||||
self._read(fp, filename)
|
self._read(fp, filename)
|
||||||
|
|
||||||
def as_dict(self, raw=False, vars=None):
|
def as_dict(self, raw=False, vars=None):
|
||||||
"""Convert an INI file to a dictionary"""
|
"""Convert an INI file to a dictionary."""
|
||||||
# Load INI file into a dict
|
# Load INI file into a dict
|
||||||
result = {}
|
result = {}
|
||||||
for section in self.sections():
|
for section in self.sections():
|
||||||
|
|
|
@ -120,7 +120,6 @@ missing = object()
|
||||||
|
|
||||||
|
|
||||||
class Session(object):
|
class Session(object):
|
||||||
|
|
||||||
"""A CherryPy dict-like Session object (one per request)."""
|
"""A CherryPy dict-like Session object (one per request)."""
|
||||||
|
|
||||||
_id = None
|
_id = None
|
||||||
|
@ -148,9 +147,11 @@ class Session(object):
|
||||||
to session data."""
|
to session data."""
|
||||||
|
|
||||||
loaded = False
|
loaded = False
|
||||||
|
"""If True, data has been retrieved from storage.
|
||||||
|
|
||||||
|
This should happen automatically on the first attempt to access
|
||||||
|
session data.
|
||||||
"""
|
"""
|
||||||
If True, data has been retrieved from storage. This should happen
|
|
||||||
automatically on the first attempt to access session data."""
|
|
||||||
|
|
||||||
clean_thread = None
|
clean_thread = None
|
||||||
'Class-level Monitor which calls self.clean_up.'
|
'Class-level Monitor which calls self.clean_up.'
|
||||||
|
@ -165,9 +166,10 @@ class Session(object):
|
||||||
'True if the session requested by the client did not exist.'
|
'True if the session requested by the client did not exist.'
|
||||||
|
|
||||||
regenerated = False
|
regenerated = False
|
||||||
|
"""True if the application called session.regenerate().
|
||||||
|
|
||||||
|
This is not set by internal calls to regenerate the session id.
|
||||||
"""
|
"""
|
||||||
True if the application called session.regenerate(). This is not set by
|
|
||||||
internal calls to regenerate the session id."""
|
|
||||||
|
|
||||||
debug = False
|
debug = False
|
||||||
'If True, log debug information.'
|
'If True, log debug information.'
|
||||||
|
@ -335,8 +337,9 @@ class Session(object):
|
||||||
|
|
||||||
def pop(self, key, default=missing):
|
def pop(self, key, default=missing):
|
||||||
"""Remove the specified key and return the corresponding value.
|
"""Remove the specified key and return the corresponding value.
|
||||||
If key is not found, default is returned if given,
|
|
||||||
otherwise KeyError is raised.
|
If key is not found, default is returned if given, otherwise
|
||||||
|
KeyError is raised.
|
||||||
"""
|
"""
|
||||||
if not self.loaded:
|
if not self.loaded:
|
||||||
self.load()
|
self.load()
|
||||||
|
@ -351,13 +354,19 @@ class Session(object):
|
||||||
return key in self._data
|
return key in self._data
|
||||||
|
|
||||||
def get(self, key, default=None):
|
def get(self, key, default=None):
|
||||||
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
|
"""D.get(k[,d]) -> D[k] if k in D, else d.
|
||||||
|
|
||||||
|
d defaults to None.
|
||||||
|
"""
|
||||||
if not self.loaded:
|
if not self.loaded:
|
||||||
self.load()
|
self.load()
|
||||||
return self._data.get(key, default)
|
return self._data.get(key, default)
|
||||||
|
|
||||||
def update(self, d):
|
def update(self, d):
|
||||||
"""D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
|
"""D.update(E) -> None.
|
||||||
|
|
||||||
|
Update D from E: for k in E: D[k] = E[k].
|
||||||
|
"""
|
||||||
if not self.loaded:
|
if not self.loaded:
|
||||||
self.load()
|
self.load()
|
||||||
self._data.update(d)
|
self._data.update(d)
|
||||||
|
@ -369,7 +378,10 @@ class Session(object):
|
||||||
return self._data.setdefault(key, default)
|
return self._data.setdefault(key, default)
|
||||||
|
|
||||||
def clear(self):
|
def clear(self):
|
||||||
"""D.clear() -> None. Remove all items from D."""
|
"""D.clear() -> None.
|
||||||
|
|
||||||
|
Remove all items from D.
|
||||||
|
"""
|
||||||
if not self.loaded:
|
if not self.loaded:
|
||||||
self.load()
|
self.load()
|
||||||
self._data.clear()
|
self._data.clear()
|
||||||
|
@ -492,7 +504,8 @@ class FileSession(Session):
|
||||||
"""Set up the storage system for file-based sessions.
|
"""Set up the storage system for file-based sessions.
|
||||||
|
|
||||||
This should only be called once per process; this will be done
|
This should only be called once per process; this will be done
|
||||||
automatically when using sessions.init (as the built-in Tool does).
|
automatically when using sessions.init (as the built-in Tool
|
||||||
|
does).
|
||||||
"""
|
"""
|
||||||
# The 'storage_path' arg is required for file-based sessions.
|
# The 'storage_path' arg is required for file-based sessions.
|
||||||
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
||||||
|
@ -616,7 +629,8 @@ class MemcachedSession(Session):
|
||||||
"""Set up the storage system for memcached-based sessions.
|
"""Set up the storage system for memcached-based sessions.
|
||||||
|
|
||||||
This should only be called once per process; this will be done
|
This should only be called once per process; this will be done
|
||||||
automatically when using sessions.init (as the built-in Tool does).
|
automatically when using sessions.init (as the built-in Tool
|
||||||
|
does).
|
||||||
"""
|
"""
|
||||||
for k, v in kwargs.items():
|
for k, v in kwargs.items():
|
||||||
setattr(cls, k, v)
|
setattr(cls, k, v)
|
||||||
|
|
|
@ -56,15 +56,15 @@ def serve_file(path, content_type=None, disposition=None, name=None,
|
||||||
debug=False):
|
debug=False):
|
||||||
"""Set status, headers, and body in order to serve the given path.
|
"""Set status, headers, and body in order to serve the given path.
|
||||||
|
|
||||||
The Content-Type header will be set to the content_type arg, if provided.
|
The Content-Type header will be set to the content_type arg, if
|
||||||
If not provided, the Content-Type will be guessed by the file extension
|
provided. If not provided, the Content-Type will be guessed by the
|
||||||
of the 'path' argument.
|
file extension of the 'path' argument.
|
||||||
|
|
||||||
If disposition is not None, the Content-Disposition header will be set
|
If disposition is not None, the Content-Disposition header will be
|
||||||
to "<disposition>; filename=<name>; filename*=utf-8''<name>"
|
set to "<disposition>; filename=<name>; filename*=utf-8''<name>" as
|
||||||
as described in :rfc:`6266#appendix-D`.
|
described in :rfc:`6266#appendix-D`. If name is None, it will be set
|
||||||
If name is None, it will be set to the basename of path.
|
to the basename of path. If disposition is None, no Content-
|
||||||
If disposition is None, no Content-Disposition header will be written.
|
Disposition header will be written.
|
||||||
"""
|
"""
|
||||||
response = cherrypy.serving.response
|
response = cherrypy.serving.response
|
||||||
|
|
||||||
|
|
|
@ -31,7 +31,6 @@ _module__file__base = os.getcwd()
|
||||||
|
|
||||||
|
|
||||||
class SimplePlugin(object):
|
class SimplePlugin(object):
|
||||||
|
|
||||||
"""Plugin base class which auto-subscribes methods for known channels."""
|
"""Plugin base class which auto-subscribes methods for known channels."""
|
||||||
|
|
||||||
bus = None
|
bus = None
|
||||||
|
@ -59,7 +58,6 @@ class SimplePlugin(object):
|
||||||
|
|
||||||
|
|
||||||
class SignalHandler(object):
|
class SignalHandler(object):
|
||||||
|
|
||||||
"""Register bus channels (and listeners) for system signals.
|
"""Register bus channels (and listeners) for system signals.
|
||||||
|
|
||||||
You can modify what signals your application listens for, and what it does
|
You can modify what signals your application listens for, and what it does
|
||||||
|
@ -171,8 +169,8 @@ class SignalHandler(object):
|
||||||
If the optional 'listener' argument is provided, it will be
|
If the optional 'listener' argument is provided, it will be
|
||||||
subscribed as a listener for the given signal's channel.
|
subscribed as a listener for the given signal's channel.
|
||||||
|
|
||||||
If the given signal name or number is not available on the current
|
If the given signal name or number is not available on the
|
||||||
platform, ValueError is raised.
|
current platform, ValueError is raised.
|
||||||
"""
|
"""
|
||||||
if isinstance(signal, text_or_bytes):
|
if isinstance(signal, text_or_bytes):
|
||||||
signum = getattr(_signal, signal, None)
|
signum = getattr(_signal, signal, None)
|
||||||
|
@ -218,11 +216,10 @@ except ImportError:
|
||||||
|
|
||||||
|
|
||||||
class DropPrivileges(SimplePlugin):
|
class DropPrivileges(SimplePlugin):
|
||||||
|
|
||||||
"""Drop privileges. uid/gid arguments not available on Windows.
|
"""Drop privileges. uid/gid arguments not available on Windows.
|
||||||
|
|
||||||
Special thanks to `Gavin Baker
|
Special thanks to `Gavin Baker
|
||||||
<http://antonym.org/2005/12/dropping-privileges-in-python.html>`_
|
<http://antonym.org/2005/12/dropping-privileges-in-python.html>`_.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, bus, umask=None, uid=None, gid=None):
|
def __init__(self, bus, umask=None, uid=None, gid=None):
|
||||||
|
@ -234,7 +231,10 @@ class DropPrivileges(SimplePlugin):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def uid(self):
|
def uid(self):
|
||||||
"""The uid under which to run. Availability: Unix."""
|
"""The uid under which to run.
|
||||||
|
|
||||||
|
Availability: Unix.
|
||||||
|
"""
|
||||||
return self._uid
|
return self._uid
|
||||||
|
|
||||||
@uid.setter
|
@uid.setter
|
||||||
|
@ -250,7 +250,10 @@ class DropPrivileges(SimplePlugin):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def gid(self):
|
def gid(self):
|
||||||
"""The gid under which to run. Availability: Unix."""
|
"""The gid under which to run.
|
||||||
|
|
||||||
|
Availability: Unix.
|
||||||
|
"""
|
||||||
return self._gid
|
return self._gid
|
||||||
|
|
||||||
@gid.setter
|
@gid.setter
|
||||||
|
@ -332,7 +335,6 @@ class DropPrivileges(SimplePlugin):
|
||||||
|
|
||||||
|
|
||||||
class Daemonizer(SimplePlugin):
|
class Daemonizer(SimplePlugin):
|
||||||
|
|
||||||
"""Daemonize the running script.
|
"""Daemonize the running script.
|
||||||
|
|
||||||
Use this with a Web Site Process Bus via::
|
Use this with a Web Site Process Bus via::
|
||||||
|
@ -423,7 +425,6 @@ class Daemonizer(SimplePlugin):
|
||||||
|
|
||||||
|
|
||||||
class PIDFile(SimplePlugin):
|
class PIDFile(SimplePlugin):
|
||||||
|
|
||||||
"""Maintain a PID file via a WSPBus."""
|
"""Maintain a PID file via a WSPBus."""
|
||||||
|
|
||||||
def __init__(self, bus, pidfile):
|
def __init__(self, bus, pidfile):
|
||||||
|
@ -453,12 +454,11 @@ class PIDFile(SimplePlugin):
|
||||||
|
|
||||||
|
|
||||||
class PerpetualTimer(threading.Timer):
|
class PerpetualTimer(threading.Timer):
|
||||||
|
|
||||||
"""A responsive subclass of threading.Timer whose run() method repeats.
|
"""A responsive subclass of threading.Timer whose run() method repeats.
|
||||||
|
|
||||||
Use this timer only when you really need a very interruptible timer;
|
Use this timer only when you really need a very interruptible timer;
|
||||||
this checks its 'finished' condition up to 20 times a second, which can
|
this checks its 'finished' condition up to 20 times a second, which
|
||||||
results in pretty high CPU usage
|
can results in pretty high CPU usage
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
@ -483,14 +483,14 @@ class PerpetualTimer(threading.Timer):
|
||||||
|
|
||||||
|
|
||||||
class BackgroundTask(threading.Thread):
|
class BackgroundTask(threading.Thread):
|
||||||
|
|
||||||
"""A subclass of threading.Thread whose run() method repeats.
|
"""A subclass of threading.Thread whose run() method repeats.
|
||||||
|
|
||||||
Use this class for most repeating tasks. It uses time.sleep() to wait
|
Use this class for most repeating tasks. It uses time.sleep() to
|
||||||
for each interval, which isn't very responsive; that is, even if you call
|
wait for each interval, which isn't very responsive; that is, even
|
||||||
self.cancel(), you'll have to wait until the sleep() call finishes before
|
if you call self.cancel(), you'll have to wait until the sleep()
|
||||||
the thread stops. To compensate, it defaults to being daemonic, which means
|
call finishes before the thread stops. To compensate, it defaults to
|
||||||
it won't delay stopping the whole process.
|
being daemonic, which means it won't delay stopping the whole
|
||||||
|
process.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, interval, function, args=[], kwargs={}, bus=None):
|
def __init__(self, interval, function, args=[], kwargs={}, bus=None):
|
||||||
|
@ -525,7 +525,6 @@ class BackgroundTask(threading.Thread):
|
||||||
|
|
||||||
|
|
||||||
class Monitor(SimplePlugin):
|
class Monitor(SimplePlugin):
|
||||||
|
|
||||||
"""WSPBus listener to periodically run a callback in its own thread."""
|
"""WSPBus listener to periodically run a callback in its own thread."""
|
||||||
|
|
||||||
callback = None
|
callback = None
|
||||||
|
@ -582,7 +581,6 @@ class Monitor(SimplePlugin):
|
||||||
|
|
||||||
|
|
||||||
class Autoreloader(Monitor):
|
class Autoreloader(Monitor):
|
||||||
|
|
||||||
"""Monitor which re-executes the process when files change.
|
"""Monitor which re-executes the process when files change.
|
||||||
|
|
||||||
This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
|
This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
|
||||||
|
@ -699,20 +697,20 @@ class Autoreloader(Monitor):
|
||||||
|
|
||||||
|
|
||||||
class ThreadManager(SimplePlugin):
|
class ThreadManager(SimplePlugin):
|
||||||
|
|
||||||
"""Manager for HTTP request threads.
|
"""Manager for HTTP request threads.
|
||||||
|
|
||||||
If you have control over thread creation and destruction, publish to
|
If you have control over thread creation and destruction, publish to
|
||||||
the 'acquire_thread' and 'release_thread' channels (for each thread).
|
the 'acquire_thread' and 'release_thread' channels (for each
|
||||||
This will register/unregister the current thread and publish to
|
thread). This will register/unregister the current thread and
|
||||||
'start_thread' and 'stop_thread' listeners in the bus as needed.
|
publish to 'start_thread' and 'stop_thread' listeners in the bus as
|
||||||
|
needed.
|
||||||
|
|
||||||
If threads are created and destroyed by code you do not control
|
If threads are created and destroyed by code you do not control
|
||||||
(e.g., Apache), then, at the beginning of every HTTP request,
|
(e.g., Apache), then, at the beginning of every HTTP request,
|
||||||
publish to 'acquire_thread' only. You should not publish to
|
publish to 'acquire_thread' only. You should not publish to
|
||||||
'release_thread' in this case, since you do not know whether
|
'release_thread' in this case, since you do not know whether the
|
||||||
the thread will be re-used or not. The bus will call
|
thread will be re-used or not. The bus will call 'stop_thread'
|
||||||
'stop_thread' listeners for you when it stops.
|
listeners for you when it stops.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
threads = None
|
threads = None
|
||||||
|
|
|
@ -132,7 +132,6 @@ class Timeouts:
|
||||||
|
|
||||||
|
|
||||||
class ServerAdapter(object):
|
class ServerAdapter(object):
|
||||||
|
|
||||||
"""Adapter for an HTTP server.
|
"""Adapter for an HTTP server.
|
||||||
|
|
||||||
If you need to start more than one HTTP server (to serve on multiple
|
If you need to start more than one HTTP server (to serve on multiple
|
||||||
|
@ -188,9 +187,7 @@ class ServerAdapter(object):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def description(self):
|
def description(self):
|
||||||
"""
|
"""A description about where this server is bound."""
|
||||||
A description about where this server is bound.
|
|
||||||
"""
|
|
||||||
if self.bind_addr is None:
|
if self.bind_addr is None:
|
||||||
on_what = 'unknown interface (dynamic?)'
|
on_what = 'unknown interface (dynamic?)'
|
||||||
elif isinstance(self.bind_addr, tuple):
|
elif isinstance(self.bind_addr, tuple):
|
||||||
|
@ -292,7 +289,6 @@ class ServerAdapter(object):
|
||||||
|
|
||||||
|
|
||||||
class FlupCGIServer(object):
|
class FlupCGIServer(object):
|
||||||
|
|
||||||
"""Adapter for a flup.server.cgi.WSGIServer."""
|
"""Adapter for a flup.server.cgi.WSGIServer."""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
@ -316,7 +312,6 @@ class FlupCGIServer(object):
|
||||||
|
|
||||||
|
|
||||||
class FlupFCGIServer(object):
|
class FlupFCGIServer(object):
|
||||||
|
|
||||||
"""Adapter for a flup.server.fcgi.WSGIServer."""
|
"""Adapter for a flup.server.fcgi.WSGIServer."""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
@ -362,7 +357,6 @@ class FlupFCGIServer(object):
|
||||||
|
|
||||||
|
|
||||||
class FlupSCGIServer(object):
|
class FlupSCGIServer(object):
|
||||||
|
|
||||||
"""Adapter for a flup.server.scgi.WSGIServer."""
|
"""Adapter for a flup.server.scgi.WSGIServer."""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
|
|
@ -1,4 +1,7 @@
|
||||||
"""Windows service. Requires pywin32."""
|
"""Windows service.
|
||||||
|
|
||||||
|
Requires pywin32.
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import win32api
|
import win32api
|
||||||
|
@ -11,7 +14,6 @@ from cherrypy.process import wspbus, plugins
|
||||||
|
|
||||||
|
|
||||||
class ConsoleCtrlHandler(plugins.SimplePlugin):
|
class ConsoleCtrlHandler(plugins.SimplePlugin):
|
||||||
|
|
||||||
"""A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
|
"""A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
|
||||||
|
|
||||||
def __init__(self, bus):
|
def __init__(self, bus):
|
||||||
|
@ -69,10 +71,10 @@ class ConsoleCtrlHandler(plugins.SimplePlugin):
|
||||||
|
|
||||||
|
|
||||||
class Win32Bus(wspbus.Bus):
|
class Win32Bus(wspbus.Bus):
|
||||||
|
|
||||||
"""A Web Site Process Bus implementation for Win32.
|
"""A Web Site Process Bus implementation for Win32.
|
||||||
|
|
||||||
Instead of time.sleep, this bus blocks using native win32event objects.
|
Instead of time.sleep, this bus blocks using native win32event
|
||||||
|
objects.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
@ -120,7 +122,6 @@ class Win32Bus(wspbus.Bus):
|
||||||
|
|
||||||
|
|
||||||
class _ControlCodes(dict):
|
class _ControlCodes(dict):
|
||||||
|
|
||||||
"""Control codes used to "signal" a service via ControlService.
|
"""Control codes used to "signal" a service via ControlService.
|
||||||
|
|
||||||
User-defined control codes are in the range 128-255. We generally use
|
User-defined control codes are in the range 128-255. We generally use
|
||||||
|
@ -152,7 +153,6 @@ def signal_child(service, command):
|
||||||
|
|
||||||
|
|
||||||
class PyWebService(win32serviceutil.ServiceFramework):
|
class PyWebService(win32serviceutil.ServiceFramework):
|
||||||
|
|
||||||
"""Python Web Service."""
|
"""Python Web Service."""
|
||||||
|
|
||||||
_svc_name_ = 'Python Web Service'
|
_svc_name_ = 'Python Web Service'
|
||||||
|
|
|
@ -57,7 +57,6 @@ the new state.::
|
||||||
| \ |
|
| \ |
|
||||||
| V V
|
| V V
|
||||||
STARTED <-- STARTING
|
STARTED <-- STARTING
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import atexit
|
import atexit
|
||||||
|
@ -65,7 +64,7 @@ import atexit
|
||||||
try:
|
try:
|
||||||
import ctypes
|
import ctypes
|
||||||
except ImportError:
|
except ImportError:
|
||||||
"""Google AppEngine is shipped without ctypes
|
"""Google AppEngine is shipped without ctypes.
|
||||||
|
|
||||||
:seealso: http://stackoverflow.com/a/6523777/70170
|
:seealso: http://stackoverflow.com/a/6523777/70170
|
||||||
"""
|
"""
|
||||||
|
@ -165,8 +164,8 @@ class Bus(object):
|
||||||
All listeners for a given channel are guaranteed to be called even
|
All listeners for a given channel are guaranteed to be called even
|
||||||
if others at the same channel fail. Each failure is logged, but
|
if others at the same channel fail. Each failure is logged, but
|
||||||
execution proceeds on to the next listener. The only way to stop all
|
execution proceeds on to the next listener. The only way to stop all
|
||||||
processing from inside a listener is to raise SystemExit and stop the
|
processing from inside a listener is to raise SystemExit and stop
|
||||||
whole server.
|
the whole server.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
states = states
|
states = states
|
||||||
|
@ -312,8 +311,9 @@ class Bus(object):
|
||||||
def restart(self):
|
def restart(self):
|
||||||
"""Restart the process (may close connections).
|
"""Restart the process (may close connections).
|
||||||
|
|
||||||
This method does not restart the process from the calling thread;
|
This method does not restart the process from the calling
|
||||||
instead, it stops the bus and asks the main thread to call execv.
|
thread; instead, it stops the bus and asks the main thread to
|
||||||
|
call execv.
|
||||||
"""
|
"""
|
||||||
self.execv = True
|
self.execv = True
|
||||||
self.exit()
|
self.exit()
|
||||||
|
@ -327,10 +327,11 @@ class Bus(object):
|
||||||
"""Wait for the EXITING state, KeyboardInterrupt or SystemExit.
|
"""Wait for the EXITING state, KeyboardInterrupt or SystemExit.
|
||||||
|
|
||||||
This function is intended to be called only by the main thread.
|
This function is intended to be called only by the main thread.
|
||||||
After waiting for the EXITING state, it also waits for all threads
|
After waiting for the EXITING state, it also waits for all
|
||||||
to terminate, and then calls os.execv if self.execv is True. This
|
threads to terminate, and then calls os.execv if self.execv is
|
||||||
design allows another thread to call bus.restart, yet have the main
|
True. This design allows another thread to call bus.restart, yet
|
||||||
thread perform the actual execv call (required on some platforms).
|
have the main thread perform the actual execv call (required on
|
||||||
|
some platforms).
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
self.wait(states.EXITING, interval=interval, channel='main')
|
self.wait(states.EXITING, interval=interval, channel='main')
|
||||||
|
@ -379,13 +380,14 @@ class Bus(object):
|
||||||
def _do_execv(self):
|
def _do_execv(self):
|
||||||
"""Re-execute the current process.
|
"""Re-execute the current process.
|
||||||
|
|
||||||
This must be called from the main thread, because certain platforms
|
This must be called from the main thread, because certain
|
||||||
(OS X) don't allow execv to be called in a child thread very well.
|
platforms (OS X) don't allow execv to be called in a child
|
||||||
|
thread very well.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
args = self._get_true_argv()
|
args = self._get_true_argv()
|
||||||
except NotImplementedError:
|
except NotImplementedError:
|
||||||
"""It's probably win32 or GAE"""
|
"""It's probably win32 or GAE."""
|
||||||
args = [sys.executable] + self._get_interpreter_argv() + sys.argv
|
args = [sys.executable] + self._get_interpreter_argv() + sys.argv
|
||||||
|
|
||||||
self.log('Re-spawning %s' % ' '.join(args))
|
self.log('Re-spawning %s' % ' '.join(args))
|
||||||
|
@ -472,7 +474,7 @@ class Bus(object):
|
||||||
c_ind = None
|
c_ind = None
|
||||||
|
|
||||||
if is_module:
|
if is_module:
|
||||||
"""It's containing `-m -m` sequence of arguments"""
|
"""It's containing `-m -m` sequence of arguments."""
|
||||||
if is_command and c_ind < m_ind:
|
if is_command and c_ind < m_ind:
|
||||||
"""There's `-c -c` before `-m`"""
|
"""There's `-c -c` before `-m`"""
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
|
@ -481,7 +483,7 @@ class Bus(object):
|
||||||
# Survive module argument here
|
# Survive module argument here
|
||||||
original_module = sys.argv[0]
|
original_module = sys.argv[0]
|
||||||
if not os.access(original_module, os.R_OK):
|
if not os.access(original_module, os.R_OK):
|
||||||
"""There's no such module exist"""
|
"""There's no such module exist."""
|
||||||
raise AttributeError(
|
raise AttributeError(
|
||||||
"{} doesn't seem to be a module "
|
"{} doesn't seem to be a module "
|
||||||
'accessible by current user'.format(original_module))
|
'accessible by current user'.format(original_module))
|
||||||
|
@ -489,7 +491,7 @@ class Bus(object):
|
||||||
# ... and substitute it with the original module path:
|
# ... and substitute it with the original module path:
|
||||||
_argv.insert(m_ind, original_module)
|
_argv.insert(m_ind, original_module)
|
||||||
elif is_command:
|
elif is_command:
|
||||||
"""It's containing just `-c -c` sequence of arguments"""
|
"""It's containing just `-c -c` sequence of arguments."""
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
"Cannot reconstruct command from '-c'. "
|
"Cannot reconstruct command from '-c'. "
|
||||||
'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
|
'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
|
||||||
|
@ -512,13 +514,13 @@ class Bus(object):
|
||||||
"""Prepend current working dir to PATH environment variable if needed.
|
"""Prepend current working dir to PATH environment variable if needed.
|
||||||
|
|
||||||
If sys.path[0] is an empty string, the interpreter was likely
|
If sys.path[0] is an empty string, the interpreter was likely
|
||||||
invoked with -m and the effective path is about to change on
|
invoked with -m and the effective path is about to change on re-
|
||||||
re-exec. Add the current directory to $PYTHONPATH to ensure
|
exec. Add the current directory to $PYTHONPATH to ensure that
|
||||||
that the new process sees the same path.
|
the new process sees the same path.
|
||||||
|
|
||||||
This issue cannot be addressed in the general case because
|
This issue cannot be addressed in the general case because
|
||||||
Python cannot reliably reconstruct the
|
Python cannot reliably reconstruct the original command line (
|
||||||
original command line (http://bugs.python.org/issue14208).
|
http://bugs.python.org/issue14208).
|
||||||
|
|
||||||
(This idea filched from tornado.autoreload)
|
(This idea filched from tornado.autoreload)
|
||||||
"""
|
"""
|
||||||
|
@ -536,10 +538,10 @@ class Bus(object):
|
||||||
"""Set the CLOEXEC flag on all open files (except stdin/out/err).
|
"""Set the CLOEXEC flag on all open files (except stdin/out/err).
|
||||||
|
|
||||||
If self.max_cloexec_files is an integer (the default), then on
|
If self.max_cloexec_files is an integer (the default), then on
|
||||||
platforms which support it, it represents the max open files setting
|
platforms which support it, it represents the max open files
|
||||||
for the operating system. This function will be called just before
|
setting for the operating system. This function will be called
|
||||||
the process is restarted via os.execv() to prevent open files
|
just before the process is restarted via os.execv() to prevent
|
||||||
from persisting into the new process.
|
open files from persisting into the new process.
|
||||||
|
|
||||||
Set self.max_cloexec_files to 0 to disable this behavior.
|
Set self.max_cloexec_files to 0 to disable this behavior.
|
||||||
"""
|
"""
|
||||||
|
@ -578,7 +580,10 @@ class Bus(object):
|
||||||
return t
|
return t
|
||||||
|
|
||||||
def log(self, msg='', level=20, traceback=False):
|
def log(self, msg='', level=20, traceback=False):
|
||||||
"""Log the given message. Append the last traceback if requested."""
|
"""Log the given message.
|
||||||
|
|
||||||
|
Append the last traceback if requested.
|
||||||
|
"""
|
||||||
if traceback:
|
if traceback:
|
||||||
msg += '\n' + ''.join(_traceback.format_exception(*sys.exc_info()))
|
msg += '\n' + ''.join(_traceback.format_exception(*sys.exc_info()))
|
||||||
self.publish('log', msg, level)
|
self.publish('log', msg, level)
|
||||||
|
|
|
@ -9,7 +9,6 @@ Even before any tweaking, this should serve a few demonstration pages.
|
||||||
Change to this directory and run:
|
Change to this directory and run:
|
||||||
|
|
||||||
cherryd -c site.conf
|
cherryd -c site.conf
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
|
|
|
@ -1,6 +1,4 @@
|
||||||
"""
|
"""Regression test suite for CherryPy."""
|
||||||
Regression test suite for CherryPy.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
"""Test module for the @-decorator syntax, which is version-specific"""
|
"""Test module for the @-decorator syntax, which is version-specific."""
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
from cherrypy import expose, tools
|
from cherrypy import expose, tools
|
||||||
|
|
|
@ -1,24 +1,24 @@
|
||||||
"""CherryPy Benchmark Tool
|
"""CherryPy Benchmark Tool.
|
||||||
|
|
||||||
Usage:
|
Usage:
|
||||||
benchmark.py [options]
|
benchmark.py [options]
|
||||||
|
|
||||||
--null: use a null Request object (to bench the HTTP server only)
|
--null: use a null Request object (to bench the HTTP server only)
|
||||||
--notests: start the server but do not run the tests; this allows
|
--notests: start the server but do not run the tests; this allows
|
||||||
you to check the tested pages with a browser
|
you to check the tested pages with a browser
|
||||||
--help: show this help message
|
--help: show this help message
|
||||||
--cpmodpy: run tests via apache on 54583 (with the builtin _cpmodpy)
|
--cpmodpy: run tests via apache on 54583 (with the builtin _cpmodpy)
|
||||||
--modpython: run tests via apache on 54583 (with modpython_gateway)
|
--modpython: run tests via apache on 54583 (with modpython_gateway)
|
||||||
--ab=path: Use the ab script/executable at 'path' (see below)
|
--ab=path: Use the ab script/executable at 'path' (see below)
|
||||||
--apache=path: Use the apache script/exe at 'path' (see below)
|
--apache=path: Use the apache script/exe at 'path' (see below)
|
||||||
|
|
||||||
To run the benchmarks, the Apache Benchmark tool "ab" must either be on
|
To run the benchmarks, the Apache Benchmark tool "ab" must either be on
|
||||||
your system path, or specified via the --ab=path option.
|
your system path, or specified via the --ab=path option.
|
||||||
|
|
||||||
To run the modpython tests, the "apache" executable or script must be
|
To run the modpython tests, the "apache" executable or script must be
|
||||||
on your system path, or provided via the --apache=path option. On some
|
on your system path, or provided via the --apache=path option. On some
|
||||||
platforms, "apache" may be called "apachectl" or "apache2ctl"--create
|
platforms, "apache" may be called "apachectl" or "apache2ctl"--create
|
||||||
a symlink to them if needed.
|
a symlink to them if needed.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import getopt
|
import getopt
|
||||||
|
@ -106,7 +106,6 @@ def init():
|
||||||
|
|
||||||
|
|
||||||
class NullRequest:
|
class NullRequest:
|
||||||
|
|
||||||
"""A null HTTP request class, returning 200 and an empty body."""
|
"""A null HTTP request class, returning 200 and an empty body."""
|
||||||
|
|
||||||
def __init__(self, local, remote, scheme='http'):
|
def __init__(self, local, remote, scheme='http'):
|
||||||
|
@ -131,65 +130,66 @@ class NullResponse:
|
||||||
|
|
||||||
|
|
||||||
class ABSession:
|
class ABSession:
|
||||||
|
|
||||||
"""A session of 'ab', the Apache HTTP server benchmarking tool.
|
"""A session of 'ab', the Apache HTTP server benchmarking tool.
|
||||||
|
|
||||||
Example output from ab:
|
Example output from ab:
|
||||||
|
|
||||||
This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0
|
This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0
|
||||||
Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd, http://www.zeustech.net/
|
Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd,
|
||||||
Copyright (c) 1998-2002 The Apache Software Foundation, http://www.apache.org/
|
http://www.zeustech.net/
|
||||||
|
Copyright (c) 1998-2002 The Apache Software Foundation,
|
||||||
|
http://www.apache.org/
|
||||||
|
|
||||||
Benchmarking 127.0.0.1 (be patient)
|
Benchmarking 127.0.0.1 (be patient)
|
||||||
Completed 100 requests
|
Completed 100 requests
|
||||||
Completed 200 requests
|
Completed 200 requests
|
||||||
Completed 300 requests
|
Completed 300 requests
|
||||||
Completed 400 requests
|
Completed 400 requests
|
||||||
Completed 500 requests
|
Completed 500 requests
|
||||||
Completed 600 requests
|
Completed 600 requests
|
||||||
Completed 700 requests
|
Completed 700 requests
|
||||||
Completed 800 requests
|
Completed 800 requests
|
||||||
Completed 900 requests
|
Completed 900 requests
|
||||||
|
|
||||||
|
|
||||||
Server Software: CherryPy/3.1beta
|
Server Software: CherryPy/3.1beta
|
||||||
Server Hostname: 127.0.0.1
|
Server Hostname: 127.0.0.1
|
||||||
Server Port: 54583
|
Server Port: 54583
|
||||||
|
|
||||||
Document Path: /static/index.html
|
Document Path: /static/index.html
|
||||||
Document Length: 14 bytes
|
Document Length: 14 bytes
|
||||||
|
|
||||||
Concurrency Level: 10
|
Concurrency Level: 10
|
||||||
Time taken for tests: 9.643867 seconds
|
Time taken for tests: 9.643867 seconds
|
||||||
Complete requests: 1000
|
Complete requests: 1000
|
||||||
Failed requests: 0
|
Failed requests: 0
|
||||||
Write errors: 0
|
Write errors: 0
|
||||||
Total transferred: 189000 bytes
|
Total transferred: 189000 bytes
|
||||||
HTML transferred: 14000 bytes
|
HTML transferred: 14000 bytes
|
||||||
Requests per second: 103.69 [#/sec] (mean)
|
Requests per second: 103.69 [#/sec] (mean)
|
||||||
Time per request: 96.439 [ms] (mean)
|
Time per request: 96.439 [ms] (mean)
|
||||||
Time per request: 9.644 [ms] (mean, across all concurrent requests)
|
Time per request: 9.644 [ms] (mean, across all concurrent requests)
|
||||||
Transfer rate: 19.08 [Kbytes/sec] received
|
Transfer rate: 19.08 [Kbytes/sec] received
|
||||||
|
|
||||||
Connection Times (ms)
|
Connection Times (ms)
|
||||||
min mean[+/-sd] median max
|
min mean[+/-sd] median max
|
||||||
Connect: 0 0 2.9 0 10
|
Connect: 0 0 2.9 0 10
|
||||||
Processing: 20 94 7.3 90 130
|
Processing: 20 94 7.3 90 130
|
||||||
Waiting: 0 43 28.1 40 100
|
Waiting: 0 43 28.1 40 100
|
||||||
Total: 20 95 7.3 100 130
|
Total: 20 95 7.3 100 130
|
||||||
|
|
||||||
Percentage of the requests served within a certain time (ms)
|
Percentage of the requests served within a certain time (ms)
|
||||||
50% 100
|
50% 100
|
||||||
66% 100
|
66% 100
|
||||||
75% 100
|
75% 100
|
||||||
80% 100
|
80% 100
|
||||||
90% 100
|
90% 100
|
||||||
95% 100
|
95% 100
|
||||||
98% 100
|
98% 100
|
||||||
99% 110
|
99% 110
|
||||||
100% 130 (longest request)
|
100% 130 (longest request)
|
||||||
Finished 1000 requests
|
Finished 1000 requests
|
||||||
"""
|
"""
|
||||||
|
|
||||||
parse_patterns = [
|
parse_patterns = [
|
||||||
('complete_requests', 'Completed',
|
('complete_requests', 'Completed',
|
||||||
|
@ -403,7 +403,6 @@ if __name__ == '__main__':
|
||||||
print('Starting CherryPy app server...')
|
print('Starting CherryPy app server...')
|
||||||
|
|
||||||
class NullWriter(object):
|
class NullWriter(object):
|
||||||
|
|
||||||
"""Suppresses the printing of socket errors."""
|
"""Suppresses the printing of socket errors."""
|
||||||
|
|
||||||
def write(self, data):
|
def write(self, data):
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
"""Demonstration app for cherrypy.checker.
|
"""Demonstration app for cherrypy.checker.
|
||||||
|
|
||||||
This application is intentionally broken and badly designed.
|
This application is intentionally broken and badly designed. To
|
||||||
To demonstrate the output of the CherryPy Checker, simply execute
|
demonstrate the output of the CherryPy Checker, simply execute this
|
||||||
this module.
|
module.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
|
@ -28,7 +28,6 @@ serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem')
|
||||||
|
|
||||||
|
|
||||||
class Supervisor(object):
|
class Supervisor(object):
|
||||||
|
|
||||||
"""Base class for modeling and controlling servers during testing."""
|
"""Base class for modeling and controlling servers during testing."""
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
|
@ -43,14 +42,13 @@ def log_to_stderr(msg, level):
|
||||||
|
|
||||||
|
|
||||||
class LocalSupervisor(Supervisor):
|
class LocalSupervisor(Supervisor):
|
||||||
|
|
||||||
"""Base class for modeling/controlling servers which run in the same
|
"""Base class for modeling/controlling servers which run in the same
|
||||||
process.
|
process.
|
||||||
|
|
||||||
When the server side runs in a different process, start/stop can dump all
|
When the server side runs in a different process, start/stop can
|
||||||
state between each test module easily. When the server side runs in the
|
dump all state between each test module easily. When the server side
|
||||||
same process as the client, however, we have to do a bit more work to
|
runs in the same process as the client, however, we have to do a bit
|
||||||
ensure config and mounted apps are reset between tests.
|
more work to ensure config and mounted apps are reset between tests.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
using_apache = False
|
using_apache = False
|
||||||
|
@ -99,7 +97,6 @@ class LocalSupervisor(Supervisor):
|
||||||
|
|
||||||
|
|
||||||
class NativeServerSupervisor(LocalSupervisor):
|
class NativeServerSupervisor(LocalSupervisor):
|
||||||
|
|
||||||
"""Server supervisor for the builtin HTTP server."""
|
"""Server supervisor for the builtin HTTP server."""
|
||||||
|
|
||||||
httpserver_class = 'cherrypy._cpnative_server.CPHTTPServer'
|
httpserver_class = 'cherrypy._cpnative_server.CPHTTPServer'
|
||||||
|
@ -111,7 +108,6 @@ class NativeServerSupervisor(LocalSupervisor):
|
||||||
|
|
||||||
|
|
||||||
class LocalWSGISupervisor(LocalSupervisor):
|
class LocalWSGISupervisor(LocalSupervisor):
|
||||||
|
|
||||||
"""Server supervisor for the builtin WSGI server."""
|
"""Server supervisor for the builtin WSGI server."""
|
||||||
|
|
||||||
httpserver_class = 'cherrypy._cpwsgi_server.CPWSGIServer'
|
httpserver_class = 'cherrypy._cpwsgi_server.CPWSGIServer'
|
||||||
|
@ -311,8 +307,7 @@ class CPWebCase(webtest.WebCase):
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
def getPage(self, url, *args, **kwargs):
|
def getPage(self, url, *args, **kwargs):
|
||||||
"""Open the url.
|
"""Open the url."""
|
||||||
"""
|
|
||||||
if self.script_name:
|
if self.script_name:
|
||||||
url = httputil.urljoin(self.script_name, url)
|
url = httputil.urljoin(self.script_name, url)
|
||||||
return webtest.WebCase.getPage(self, url, *args, **kwargs)
|
return webtest.WebCase.getPage(self, url, *args, **kwargs)
|
||||||
|
@ -323,8 +318,9 @@ class CPWebCase(webtest.WebCase):
|
||||||
def assertErrorPage(self, status, message=None, pattern=''):
|
def assertErrorPage(self, status, message=None, pattern=''):
|
||||||
"""Compare the response body with a built in error page.
|
"""Compare the response body with a built in error page.
|
||||||
|
|
||||||
The function will optionally look for the regexp pattern,
|
The function will optionally look for the regexp pattern, within
|
||||||
within the exception embedded in the error page."""
|
the exception embedded in the error page.
|
||||||
|
"""
|
||||||
|
|
||||||
# This will never contain a traceback
|
# This will never contain a traceback
|
||||||
page = cherrypy._cperror.get_error_page(status, message=message)
|
page = cherrypy._cperror.get_error_page(status, message=message)
|
||||||
|
@ -453,19 +449,17 @@ server.ssl_private_key: r'%s'
|
||||||
'-c', self.config_file,
|
'-c', self.config_file,
|
||||||
'-p', self.pid_file,
|
'-p', self.pid_file,
|
||||||
]
|
]
|
||||||
r"""
|
r"""Command for running cherryd server with autoreload enabled.
|
||||||
Command for running cherryd server with autoreload enabled
|
|
||||||
|
|
||||||
Using
|
Using
|
||||||
|
|
||||||
```
|
```
|
||||||
['-c',
|
['-c',
|
||||||
"__requires__ = 'CherryPy'; \
|
"__requires__ = 'CherryPy'; \
|
||||||
import pkg_resources, re, sys; \
|
import importlib.metadata, re, sys; \
|
||||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]); \
|
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]); \
|
||||||
sys.exit(\
|
sys.exit(\
|
||||||
pkg_resources.load_entry_point(\
|
importlib.metadata.distribution('cherrypy').entry_points[0])"]
|
||||||
'CherryPy', 'console_scripts', 'cherryd')())"]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
doesn't work as it's impossible to reconstruct the `-c`'s contents.
|
doesn't work as it's impossible to reconstruct the `-c`'s contents.
|
||||||
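The comment above now spells the inline `-c` payload in terms of `importlib.metadata` instead of `pkg_resources`, while noting that the one-liner itself cannot be reconstructed. For reference, a minimal sketch of resolving the `cherryd` console script through `importlib.metadata` alone; it assumes CherryPy is installed and registers that entry point, and is not part of this change.
```
import importlib.metadata

# Hypothetical lookup, shown only to illustrate the pkg_resources replacement.
eps = importlib.metadata.distribution('CherryPy').entry_points
(cherryd,) = [
    ep for ep in eps
    if ep.group == 'console_scripts' and ep.name == 'cherryd'
]
main = cherryd.load()  # the callable pkg_resources.load_entry_point() used to return
```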
|
|
|
@ -1,4 +1,4 @@
|
||||||
"""logtest, a unittest.TestCase helper for testing log output."""
|
"""Logtest, a unittest.TestCase helper for testing log output."""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
@ -32,7 +32,6 @@ except ImportError:
|
||||||
|
|
||||||
|
|
||||||
class LogCase(object):
|
class LogCase(object):
|
||||||
|
|
||||||
"""unittest.TestCase mixin for testing log messages.
|
"""unittest.TestCase mixin for testing log messages.
|
||||||
|
|
||||||
logfile: a filename for the desired log. Yes, I know modes are evil,
|
logfile: a filename for the desired log. Yes, I know modes are evil,
|
||||||
|
@ -116,7 +115,8 @@ class LogCase(object):
|
||||||
"""Return lines from self.logfile in the marked region.
|
"""Return lines from self.logfile in the marked region.
|
||||||
|
|
||||||
If marker is None, self.lastmarker is used. If the log hasn't
|
If marker is None, self.lastmarker is used. If the log hasn't
|
||||||
been marked (using self.markLog), the entire log will be returned.
|
been marked (using self.markLog), the entire log will be
|
||||||
|
returned.
|
||||||
"""
|
"""
|
||||||
# Give the logger time to finish writing?
|
# Give the logger time to finish writing?
|
||||||
# time.sleep(0.5)
|
# time.sleep(0.5)
|
||||||
|
@ -146,9 +146,10 @@ class LogCase(object):
|
||||||
def assertInLog(self, line, marker=None):
|
def assertInLog(self, line, marker=None):
|
||||||
"""Fail if the given (partial) line is not in the log.
|
"""Fail if the given (partial) line is not in the log.
|
||||||
|
|
||||||
The log will be searched from the given marker to the next marker.
|
The log will be searched from the given marker to the next
|
||||||
If marker is None, self.lastmarker is used. If the log hasn't
|
marker. If marker is None, self.lastmarker is used. If the log
|
||||||
been marked (using self.markLog), the entire log will be searched.
|
hasn't been marked (using self.markLog), the entire log will be
|
||||||
|
searched.
|
||||||
"""
|
"""
|
||||||
data = self._read_marked_region(marker)
|
data = self._read_marked_region(marker)
|
||||||
for logline in data:
|
for logline in data:
|
||||||
|
@ -160,9 +161,10 @@ class LogCase(object):
|
||||||
def assertNotInLog(self, line, marker=None):
|
def assertNotInLog(self, line, marker=None):
|
||||||
"""Fail if the given (partial) line is in the log.
|
"""Fail if the given (partial) line is in the log.
|
||||||
|
|
||||||
The log will be searched from the given marker to the next marker.
|
The log will be searched from the given marker to the next
|
||||||
If marker is None, self.lastmarker is used. If the log hasn't
|
marker. If marker is None, self.lastmarker is used. If the log
|
||||||
been marked (using self.markLog), the entire log will be searched.
|
hasn't been marked (using self.markLog), the entire log will be
|
||||||
|
searched.
|
||||||
"""
|
"""
|
||||||
data = self._read_marked_region(marker)
|
data = self._read_marked_region(marker)
|
||||||
for logline in data:
|
for logline in data:
|
||||||
|
@ -173,9 +175,10 @@ class LogCase(object):
|
||||||
def assertValidUUIDv4(self, marker=None):
|
def assertValidUUIDv4(self, marker=None):
|
||||||
"""Fail if the given UUIDv4 is not valid.
|
"""Fail if the given UUIDv4 is not valid.
|
||||||
|
|
||||||
The log will be searched from the given marker to the next marker.
|
The log will be searched from the given marker to the next
|
||||||
If marker is None, self.lastmarker is used. If the log hasn't
|
marker. If marker is None, self.lastmarker is used. If the log
|
||||||
been marked (using self.markLog), the entire log will be searched.
|
hasn't been marked (using self.markLog), the entire log will be
|
||||||
|
searched.
|
||||||
"""
|
"""
|
||||||
data = self._read_marked_region(marker)
|
data = self._read_marked_region(marker)
|
||||||
data = [
|
data = [
|
||||||
|
@ -200,9 +203,10 @@ class LogCase(object):
|
||||||
def assertLog(self, sliceargs, lines, marker=None):
|
def assertLog(self, sliceargs, lines, marker=None):
|
||||||
"""Fail if log.readlines()[sliceargs] is not contained in 'lines'.
|
"""Fail if log.readlines()[sliceargs] is not contained in 'lines'.
|
||||||
|
|
||||||
The log will be searched from the given marker to the next marker.
|
The log will be searched from the given marker to the next
|
||||||
If marker is None, self.lastmarker is used. If the log hasn't
|
marker. If marker is None, self.lastmarker is used. If the log
|
||||||
been marked (using self.markLog), the entire log will be searched.
|
hasn't been marked (using self.markLog), the entire log will be
|
||||||
|
searched.
|
||||||
"""
|
"""
|
||||||
data = self._read_marked_region(marker)
|
data = self._read_marked_region(marker)
|
||||||
if isinstance(sliceargs, int):
|
if isinstance(sliceargs, int):
|
||||||
|
|
|
@ -94,7 +94,6 @@ SetEnv testmod %(testmod)s
|
||||||
|
|
||||||
|
|
||||||
class ModWSGISupervisor(helper.Supervisor):
|
class ModWSGISupervisor(helper.Supervisor):
|
||||||
|
|
||||||
"""Server Controller for ModWSGI and CherryPy."""
|
"""Server Controller for ModWSGI and CherryPy."""
|
||||||
|
|
||||||
using_apache = True
|
using_apache = True
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
|
|
||||||
import calendar
|
import calendar
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from datetime import timezone as _timezone
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import cherrypy
|
import cherrypy
|
||||||
|
@ -123,9 +124,12 @@ class Root(object):
|
||||||
'reqcookie': cherrypy.request.cookie.output(),
|
'reqcookie': cherrypy.request.cookie.output(),
|
||||||
'sessiondata': list(cherrypy.session.items()),
|
'sessiondata': list(cherrypy.session.items()),
|
||||||
'servertime': (
|
'servertime': (
|
||||||
datetime.utcnow().strftime('%Y/%m/%d %H:%M') + ' UTC'
|
datetime.now(_timezone.utc).strftime('%Y/%m/%d %H:%M UTC')
|
||||||
|
),
|
||||||
|
'serverunixtime':
|
||||||
|
calendar.timegm(
|
||||||
|
datetime.now(_timezone.utc).timetuple(),
|
||||||
),
|
),
|
||||||
'serverunixtime': calendar.timegm(datetime.utcnow().timetuple()),
|
|
||||||
'cpversion': cherrypy.__version__,
|
'cpversion': cherrypy.__version__,
|
||||||
'pyversion': sys.version,
|
'pyversion': sys.version,
|
||||||
'expires': expires,
|
'expires': expires,
|
||||||
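The hunk above swaps naive `datetime.utcnow()` calls for timezone-aware `datetime.now(timezone.utc)`. A minimal standalone sketch of the same pattern, with illustrative variable names rather than the demo's dictionary keys:
```
import calendar
from datetime import datetime, timezone

now_utc = datetime.now(timezone.utc)  # aware datetime, replaces utcnow()
servertime = now_utc.strftime('%Y/%m/%d %H:%M UTC')
serverunixtime = calendar.timegm(now_utc.timetuple())  # seconds since the epoch, UTC
```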
|
|
|
@ -1,5 +1,4 @@
|
||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
"""Basic tests for the CherryPy core: request handling."""
|
"""Basic tests for the CherryPy core: request handling."""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
@ -48,7 +47,6 @@ class CoreRequestHandlingTest(helper.CPWebCase):
|
||||||
root.expose_dec = ExposeExamples()
|
root.expose_dec = ExposeExamples()
|
||||||
|
|
||||||
class TestType(type):
|
class TestType(type):
|
||||||
|
|
||||||
"""Metaclass which automatically exposes all functions in each
|
"""Metaclass which automatically exposes all functions in each
|
||||||
subclass, and adds an instance of the subclass as an attribute
|
subclass, and adds an instance of the subclass as an attribute
|
||||||
of root.
|
of root.
|
||||||
|
|
|
@ -97,9 +97,7 @@ def setup_server():
|
||||||
class UserContainerNode(object):
|
class UserContainerNode(object):
|
||||||
|
|
||||||
def POST(self, name):
|
def POST(self, name):
|
||||||
"""
|
"""Allow the creation of a new Object."""
|
||||||
Allow the creation of a new Object
|
|
||||||
"""
|
|
||||||
return 'POST %d' % make_user(name)
|
return 'POST %d' % make_user(name)
|
||||||
|
|
||||||
def GET(self):
|
def GET(self):
|
||||||
|
@ -125,15 +123,11 @@ def setup_server():
|
||||||
raise cherrypy.HTTPError(404)
|
raise cherrypy.HTTPError(404)
|
||||||
|
|
||||||
def GET(self, *args, **kwargs):
|
def GET(self, *args, **kwargs):
|
||||||
"""
|
"""Return the appropriate representation of the instance."""
|
||||||
Return the appropriate representation of the instance.
|
|
||||||
"""
|
|
||||||
return str(self.user)
|
return str(self.user)
|
||||||
|
|
||||||
def POST(self, name):
|
def POST(self, name):
|
||||||
"""
|
"""Update the fields of the user instance."""
|
||||||
Update the fields of the user instance.
|
|
||||||
"""
|
|
||||||
self.user.name = name
|
self.user.name = name
|
||||||
return 'POST %d' % self.user.id
|
return 'POST %d' % self.user.id
|
||||||
|
|
||||||
|
@ -151,9 +145,7 @@ def setup_server():
|
||||||
return 'PUT %d' % make_user(name, self.id)
|
return 'PUT %d' % make_user(name, self.id)
|
||||||
|
|
||||||
def DELETE(self):
|
def DELETE(self):
|
||||||
"""
|
"""Delete the user specified at the id."""
|
||||||
Delete the user specified at the id.
|
|
||||||
"""
|
|
||||||
id = self.user.id
|
id = self.user.id
|
||||||
del user_lookup[self.user.id]
|
del user_lookup[self.user.id]
|
||||||
del self.user
|
del self.user
|
||||||
|
@ -199,7 +191,6 @@ def setup_server():
|
||||||
return 'IndexOnly index'
|
return 'IndexOnly index'
|
||||||
|
|
||||||
class DecoratedPopArgs:
|
class DecoratedPopArgs:
|
||||||
|
|
||||||
"""Test _cp_dispatch with @cherrypy.popargs."""
|
"""Test _cp_dispatch with @cherrypy.popargs."""
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
|
@ -213,7 +204,6 @@ def setup_server():
|
||||||
'a', 'b', handler=ABHandler())(DecoratedPopArgs)
|
'a', 'b', handler=ABHandler())(DecoratedPopArgs)
|
||||||
|
|
||||||
class NonDecoratedPopArgs:
|
class NonDecoratedPopArgs:
|
||||||
|
|
||||||
"""Test _cp_dispatch = cherrypy.popargs()"""
|
"""Test _cp_dispatch = cherrypy.popargs()"""
|
||||||
|
|
||||||
_cp_dispatch = cherrypy.popargs('a')
|
_cp_dispatch = cherrypy.popargs('a')
|
||||||
|
@ -223,8 +213,7 @@ def setup_server():
|
||||||
return 'index: ' + str(a)
|
return 'index: ' + str(a)
|
||||||
|
|
||||||
class ParameterizedHandler:
|
class ParameterizedHandler:
|
||||||
|
"""Special handler created for each request."""
|
||||||
"""Special handler created for each request"""
|
|
||||||
|
|
||||||
def __init__(self, a):
|
def __init__(self, a):
|
||||||
self.a = a
|
self.a = a
|
||||||
|
@ -238,8 +227,7 @@ def setup_server():
|
||||||
return self.a
|
return self.a
|
||||||
|
|
||||||
class ParameterizedPopArgs:
|
class ParameterizedPopArgs:
|
||||||
|
"""Test cherrypy.popargs() with a function call handler."""
|
||||||
"""Test cherrypy.popargs() with a function call handler"""
|
|
||||||
ParameterizedPopArgs = cherrypy.popargs(
|
ParameterizedPopArgs = cherrypy.popargs(
|
||||||
'a', handler=ParameterizedHandler)(ParameterizedPopArgs)
|
'a', handler=ParameterizedHandler)(ParameterizedPopArgs)
|
||||||
|
|
||||||
|
|
|
@ -16,9 +16,7 @@ from cherrypy.test import helper
|
||||||
|
|
||||||
|
|
||||||
def is_ascii(text):
|
def is_ascii(text):
|
||||||
"""
|
"""Return True if the text encodes as ascii."""
|
||||||
Return True if the text encodes as ascii.
|
|
||||||
"""
|
|
||||||
try:
|
try:
|
||||||
text.encode('ascii')
|
text.encode('ascii')
|
||||||
return True
|
return True
|
||||||
|
@ -28,9 +26,9 @@ def is_ascii(text):
|
||||||
|
|
||||||
|
|
||||||
def encode_filename(filename):
|
def encode_filename(filename):
|
||||||
"""
|
"""Given a filename to be used in a multipart/form-data, encode the name.
|
||||||
Given a filename to be used in a multipart/form-data,
|
|
||||||
encode the name. Return the key and encoded filename.
|
Return the key and encoded filename.
|
||||||
"""
|
"""
|
||||||
if is_ascii(filename):
|
if is_ascii(filename):
|
||||||
return 'filename', '"{filename}"'.format(**locals())
|
return 'filename', '"{filename}"'.format(**locals())
|
||||||
|
@ -114,7 +112,7 @@ class HTTPTests(helper.CPWebCase):
|
||||||
|
|
||||||
@cherrypy.expose
|
@cherrypy.expose
|
||||||
def post_filename(self, myfile):
|
def post_filename(self, myfile):
|
||||||
'''Return the name of the file which was uploaded.'''
|
"""Return the name of the file which was uploaded."""
|
||||||
return myfile.filename
|
return myfile.filename
|
||||||
|
|
||||||
cherrypy.tree.mount(Root())
|
cherrypy.tree.mount(Root())
|
||||||
|
|
|
@ -199,7 +199,7 @@ def test_custom_log_format(log_tracker, monkeypatch, server):
|
||||||
|
|
||||||
|
|
||||||
def test_utc_in_timez(monkeypatch):
|
def test_utc_in_timez(monkeypatch):
|
||||||
"""Test that ``LazyRfc3339UtcTime`` is rendered as ``str`` using UTC timestamp."""
|
"""Test ``LazyRfc3339UtcTime`` renders as ``str`` UTC timestamp."""
|
||||||
utcoffset8_local_time_in_naive_utc = (
|
utcoffset8_local_time_in_naive_utc = (
|
||||||
datetime.datetime(
|
datetime.datetime(
|
||||||
year=2020,
|
year=2020,
|
||||||
|
@ -216,7 +216,7 @@ def test_utc_in_timez(monkeypatch):
|
||||||
|
|
||||||
class mock_datetime:
|
class mock_datetime:
|
||||||
@classmethod
|
@classmethod
|
||||||
def utcnow(cls):
|
def now(cls, tz):
|
||||||
return utcoffset8_local_time_in_naive_utc
|
return utcoffset8_local_time_in_naive_utc
|
||||||
|
|
||||||
monkeypatch.setattr('datetime.datetime', mock_datetime)
|
monkeypatch.setattr('datetime.datetime', mock_datetime)
|
||||||
|
|
|
@ -6,8 +6,7 @@ __metaclass__ = type
|
||||||
|
|
||||||
class TestAutoreloader:
|
class TestAutoreloader:
|
||||||
def test_file_for_file_module_when_None(self):
|
def test_file_for_file_module_when_None(self):
|
||||||
"""No error when module.__file__ is None.
|
"""No error when ``module.__file__`` is :py:data:`None`."""
|
||||||
"""
|
|
||||||
class test_module:
|
class test_module:
|
||||||
__file__ = None
|
__file__ = None
|
||||||
|
|
||||||
|
|
|
@ -275,7 +275,6 @@ class RequestObjectTests(helper.CPWebCase):
|
||||||
return 'success'
|
return 'success'
|
||||||
|
|
||||||
class Divorce(Test):
|
class Divorce(Test):
|
||||||
|
|
||||||
"""HTTP Method handlers shouldn't collide with normal method names.
|
"""HTTP Method handlers shouldn't collide with normal method names.
|
||||||
For example, a GET-handler shouldn't collide with a method named
|
For example, a GET-handler shouldn't collide with a method named
|
||||||
'get'.
|
'get'.
|
||||||
|
@ -757,8 +756,8 @@ class RequestObjectTests(helper.CPWebCase):
|
||||||
self.assertBody('application/json')
|
self.assertBody('application/json')
|
||||||
|
|
||||||
def test_dangerous_host(self):
|
def test_dangerous_host(self):
|
||||||
"""
|
"""Dangerous characters like newlines should be elided.
|
||||||
Dangerous characters like newlines should be elided.
|
|
||||||
Ref #1974.
|
Ref #1974.
|
||||||
"""
|
"""
|
||||||
# foo\nbar
|
# foo\nbar
|
||||||
|
|
|
@ -4,7 +4,7 @@ import threading
|
||||||
import time
|
import time
|
||||||
from http.client import HTTPConnection
|
from http.client import HTTPConnection
|
||||||
|
|
||||||
from distutils.spawn import find_executable
|
from shutil import which
|
||||||
import pytest
|
import pytest
|
||||||
from path import Path
|
from path import Path
|
||||||
from more_itertools import consume
|
from more_itertools import consume
|
||||||
|
@ -146,9 +146,14 @@ class SessionTest(helper.CPWebCase):
|
||||||
def teardown_class(cls):
|
def teardown_class(cls):
|
||||||
"""Clean up sessions."""
|
"""Clean up sessions."""
|
||||||
super(cls, cls).teardown_class()
|
super(cls, cls).teardown_class()
|
||||||
|
try:
|
||||||
|
files_to_clean = localDir.iterdir() # Python 3.8+
|
||||||
|
except AttributeError:
|
||||||
|
files_to_clean = localDir.listdir() # Python 3.6-3.7
|
||||||
|
|
||||||
consume(
|
consume(
|
||||||
file.remove_p()
|
file.remove_p()
|
||||||
for file in localDir.listdir()
|
for file in files_to_clean
|
||||||
if file.basename().startswith(
|
if file.basename().startswith(
|
||||||
sessions.FileSession.SESSION_PREFIX
|
sessions.FileSession.SESSION_PREFIX
|
||||||
)
|
)
|
||||||
|
@ -402,7 +407,7 @@ class SessionTest(helper.CPWebCase):
|
||||||
|
|
||||||
|
|
||||||
def is_memcached_present():
|
def is_memcached_present():
|
||||||
executable = find_executable('memcached')
|
executable = which('memcached')
|
||||||
return bool(executable)
|
return bool(executable)
|
||||||
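`distutils.spawn.find_executable` is replaced by `shutil.which`, a drop-in substitute for this use: it returns the full path of the program if it is on `PATH` and `None` otherwise. A quick illustration (the executable name is only an example):
```
from shutil import which

def is_present(name):
    # None when the program is not on PATH, so bool() gives a presence check.
    return bool(which(name))

print(is_present('memcached'))
```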
|
|
||||||
|
|
||||||
|
@ -418,9 +423,7 @@ def memcached_client_present():
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
@pytest.fixture(scope='session')
|
||||||
def memcached_instance(request, watcher_getter, memcached_server_present):
|
def memcached_instance(request, watcher_getter, memcached_server_present):
|
||||||
"""
|
"""Start up an instance of memcached."""
|
||||||
Start up an instance of memcached.
|
|
||||||
"""
|
|
||||||
|
|
||||||
port = portend.find_available_local_port()
|
port = portend.find_available_local_port()
|
||||||
|
|
||||||
|
|
|
@ -433,14 +433,13 @@ test_case_name: "test_signal_handler_unsubscribe"
|
||||||
|
|
||||||
|
|
||||||
def test_safe_wait_INADDR_ANY(): # pylint: disable=invalid-name
|
def test_safe_wait_INADDR_ANY(): # pylint: disable=invalid-name
|
||||||
"""
|
"""Wait on INADDR_ANY should not raise IOError.
|
||||||
Wait on INADDR_ANY should not raise IOError
|
|
||||||
|
|
||||||
In cases where the loopback interface does not exist, CherryPy cannot
|
In cases where the loopback interface does not exist, CherryPy
|
||||||
effectively determine if a port binding to INADDR_ANY was effected.
|
cannot effectively determine if a port binding to INADDR_ANY was
|
||||||
In this situation, CherryPy should assume that it failed to detect
|
effected. In this situation, CherryPy should assume that it failed
|
||||||
the binding (not that the binding failed) and only warn that it could
|
to detect the binding (not that the binding failed) and only warn
|
||||||
not verify it.
|
that it could not verify it.
|
||||||
"""
|
"""
|
||||||
# At such a time that CherryPy can reliably determine one or more
|
# At such a time that CherryPy can reliably determine one or more
|
||||||
# viable IP addresses of the host, this test may be removed.
|
# viable IP addresses of the host, this test may be removed.
|
||||||
|
|
|
@ -460,9 +460,7 @@ class SessionAuthTest(unittest.TestCase):
|
||||||
|
|
||||||
class TestHooks:
|
class TestHooks:
|
||||||
def test_priorities(self):
|
def test_priorities(self):
|
||||||
"""
|
"""Hooks should sort by priority order."""
|
||||||
Hooks should sort by priority order.
|
|
||||||
"""
|
|
||||||
Hook = cherrypy._cprequest.Hook
|
Hook = cherrypy._cprequest.Hook
|
||||||
hooks = [
|
hooks = [
|
||||||
Hook(None, priority=48),
|
Hook(None, priority=48),
|
||||||
|
|
|
@ -9,18 +9,14 @@ class TutorialTest(helper.CPWebCase):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def setup_server(cls):
|
def setup_server(cls):
|
||||||
"""
|
"""Mount something so the engine starts."""
|
||||||
Mount something so the engine starts.
|
|
||||||
"""
|
|
||||||
class Dummy:
|
class Dummy:
|
||||||
pass
|
pass
|
||||||
cherrypy.tree.mount(Dummy())
|
cherrypy.tree.mount(Dummy())
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def load_module(name):
|
def load_module(name):
|
||||||
"""
|
"""Import or reload tutorial module as needed."""
|
||||||
Import or reload tutorial module as needed.
|
|
||||||
"""
|
|
||||||
target = 'cherrypy.tutorial.' + name
|
target = 'cherrypy.tutorial.' + name
|
||||||
if target in sys.modules:
|
if target in sys.modules:
|
||||||
module = importlib.reload(sys.modules[target])
|
module = importlib.reload(sys.modules[target])
|
||||||
|
|
|
@ -21,9 +21,7 @@ USOCKET_PATH = usocket_path()
|
||||||
|
|
||||||
|
|
||||||
class USocketHTTPConnection(HTTPConnection):
|
class USocketHTTPConnection(HTTPConnection):
|
||||||
"""
|
"""HTTPConnection over a unix socket."""
|
||||||
HTTPConnection over a unix socket.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, path):
|
def __init__(self, path):
|
||||||
HTTPConnection.__init__(self, 'localhost')
|
HTTPConnection.__init__(self, 'localhost')
|
||||||
|
|
|
@ -11,8 +11,7 @@ import cherrypy
|
||||||
|
|
||||||
|
|
||||||
class HelloWorld:
|
class HelloWorld:
|
||||||
|
"""Sample request handler class."""
|
||||||
""" Sample request handler class. """
|
|
||||||
|
|
||||||
# Expose the index method through the web. CherryPy will never
|
# Expose the index method through the web. CherryPy will never
|
||||||
# publish methods that don't have the exposed attribute set to True.
|
# publish methods that don't have the exposed attribute set to True.
|
||||||
|
|
|
@ -3,4 +3,4 @@
|
||||||
from .more import * # noqa
|
from .more import * # noqa
|
||||||
from .recipes import * # noqa
|
from .recipes import * # noqa
|
||||||
|
|
||||||
__version__ = '10.2.0'
|
__version__ = '10.3.0'
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
import math
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
from collections import Counter, defaultdict, deque, abc
|
from collections import Counter, defaultdict, deque, abc
|
||||||
|
@ -6,6 +7,7 @@ from functools import cached_property, partial, reduce, wraps
|
||||||
from heapq import heapify, heapreplace, heappop
|
from heapq import heapify, heapreplace, heappop
|
||||||
from itertools import (
|
from itertools import (
|
||||||
chain,
|
chain,
|
||||||
|
combinations,
|
||||||
compress,
|
compress,
|
||||||
count,
|
count,
|
||||||
cycle,
|
cycle,
|
||||||
|
@ -19,7 +21,7 @@ from itertools import (
|
||||||
zip_longest,
|
zip_longest,
|
||||||
product,
|
product,
|
||||||
)
|
)
|
||||||
from math import exp, factorial, floor, log, perm, comb
|
from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
|
||||||
from queue import Empty, Queue
|
from queue import Empty, Queue
|
||||||
from random import random, randrange, uniform
|
from random import random, randrange, uniform
|
||||||
from operator import itemgetter, mul, sub, gt, lt, ge, le
|
from operator import itemgetter, mul, sub, gt, lt, ge, le
|
||||||
|
@ -61,11 +63,13 @@ __all__ = [
|
||||||
'consumer',
|
'consumer',
|
||||||
'count_cycle',
|
'count_cycle',
|
||||||
'countable',
|
'countable',
|
||||||
|
'dft',
|
||||||
'difference',
|
'difference',
|
||||||
'distinct_combinations',
|
'distinct_combinations',
|
||||||
'distinct_permutations',
|
'distinct_permutations',
|
||||||
'distribute',
|
'distribute',
|
||||||
'divide',
|
'divide',
|
||||||
|
'doublestarmap',
|
||||||
'duplicates_everseen',
|
'duplicates_everseen',
|
||||||
'duplicates_justseen',
|
'duplicates_justseen',
|
||||||
'classify_unique',
|
'classify_unique',
|
||||||
|
@ -77,6 +81,7 @@ __all__ = [
|
||||||
'groupby_transform',
|
'groupby_transform',
|
||||||
'ichunked',
|
'ichunked',
|
||||||
'iequals',
|
'iequals',
|
||||||
|
'idft',
|
||||||
'ilen',
|
'ilen',
|
||||||
'interleave',
|
'interleave',
|
||||||
'interleave_evenly',
|
'interleave_evenly',
|
||||||
|
@ -86,6 +91,7 @@ __all__ = [
|
||||||
'islice_extended',
|
'islice_extended',
|
||||||
'iterate',
|
'iterate',
|
||||||
'iter_suppress',
|
'iter_suppress',
|
||||||
|
'join_mappings',
|
||||||
'last',
|
'last',
|
||||||
'locate',
|
'locate',
|
||||||
'longest_common_prefix',
|
'longest_common_prefix',
|
||||||
|
@ -109,6 +115,7 @@ __all__ = [
|
||||||
'partitions',
|
'partitions',
|
||||||
'peekable',
|
'peekable',
|
||||||
'permutation_index',
|
'permutation_index',
|
||||||
|
'powerset_of_sets',
|
||||||
'product_index',
|
'product_index',
|
||||||
'raise_',
|
'raise_',
|
||||||
'repeat_each',
|
'repeat_each',
|
||||||
|
@ -148,6 +155,9 @@ __all__ = [
|
||||||
'zip_offset',
|
'zip_offset',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# math.sumprod is available for Python 3.12+
|
||||||
|
_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y)))
|
||||||
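The new `_fsumprod` helper prefers `math.sumprod` (Python 3.12+) and otherwise falls back to an `fsum`-based dot product. A small standard-library-only check of the fallback:
```
import math
from math import fsum
from operator import mul

# Same getattr-based fallback as above: use math.sumprod when it exists.
_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y)))

assert _fsumprod([1.0, 2.0, 3.0], [4.0, 5.0, 6.0]) == 32.0
```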
|
|
||||||
|
|
||||||
def chunked(iterable, n, strict=False):
|
def chunked(iterable, n, strict=False):
|
||||||
"""Break *iterable* into lists of length *n*:
|
"""Break *iterable* into lists of length *n*:
|
||||||
|
@ -550,10 +560,10 @@ def one(iterable, too_short=None, too_long=None):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
first_value = next(it)
|
first_value = next(it)
|
||||||
except StopIteration as e:
|
except StopIteration as exc:
|
||||||
raise (
|
raise (
|
||||||
too_short or ValueError('too few items in iterable (expected 1)')
|
too_short or ValueError('too few items in iterable (expected 1)')
|
||||||
) from e
|
) from exc
|
||||||
|
|
||||||
try:
|
try:
|
||||||
second_value = next(it)
|
second_value = next(it)
|
||||||
|
@ -840,26 +850,31 @@ def windowed(seq, n, fillvalue=None, step=1):
|
||||||
if n < 0:
|
if n < 0:
|
||||||
raise ValueError('n must be >= 0')
|
raise ValueError('n must be >= 0')
|
||||||
if n == 0:
|
if n == 0:
|
||||||
yield tuple()
|
yield ()
|
||||||
return
|
return
|
||||||
if step < 1:
|
if step < 1:
|
||||||
raise ValueError('step must be >= 1')
|
raise ValueError('step must be >= 1')
|
||||||
|
|
||||||
window = deque(maxlen=n)
|
iterable = iter(seq)
|
||||||
i = n
|
|
||||||
for _ in map(window.append, seq):
|
|
||||||
i -= 1
|
|
||||||
if not i:
|
|
||||||
i = step
|
|
||||||
yield tuple(window)
|
|
||||||
|
|
||||||
size = len(window)
|
# Generate first window
|
||||||
if size == 0:
|
window = deque(islice(iterable, n), maxlen=n)
|
||||||
|
|
||||||
|
# Deal with the first window not being full
|
||||||
|
if not window:
|
||||||
return
|
return
|
||||||
elif size < n:
|
if len(window) < n:
|
||||||
yield tuple(chain(window, repeat(fillvalue, n - size)))
|
yield tuple(window) + ((fillvalue,) * (n - len(window)))
|
||||||
elif 0 < i < min(step, n):
|
return
|
||||||
window += (fillvalue,) * i
|
yield tuple(window)
|
||||||
|
|
||||||
|
# Create the filler for the next windows. The padding ensures
|
||||||
|
# we have just enough elements to fill the last window.
|
||||||
|
padding = (fillvalue,) * (n - 1 if step >= n else step - 1)
|
||||||
|
filler = map(window.append, chain(iterable, padding))
|
||||||
|
|
||||||
|
# Generate the rest of the windows
|
||||||
|
for _ in islice(filler, step - 1, None, step):
|
||||||
yield tuple(window)
|
yield tuple(window)
|
||||||
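The rewritten `windowed` fills the first window with `islice` into a bounded `deque` and then slides it, rather than counting appended elements by hand. The observable behaviour is unchanged; a usage sketch of the documented semantics:
```
from more_itertools import windowed

assert list(windowed([1, 2, 3, 4, 5], 3)) == [(1, 2, 3), (2, 3, 4), (3, 4, 5)]

# A short input is padded with fillvalue; step controls how far the window slides.
assert list(windowed([1, 2, 3], 4, fillvalue='!')) == [(1, 2, 3, '!')]
assert list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) == [
    (1, 2, 3), (3, 4, 5), (5, 6, '!'),
]
```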
|
|
||||||
|
|
||||||
|
@ -1151,8 +1166,8 @@ def interleave_evenly(iterables, lengths=None):
|
||||||
|
|
||||||
# those iterables for which the error is negative are yielded
|
# those iterables for which the error is negative are yielded
|
||||||
# ("diagonal step" in Bresenham)
|
# ("diagonal step" in Bresenham)
|
||||||
for i, e in enumerate(errors):
|
for i, e_ in enumerate(errors):
|
||||||
if e < 0:
|
if e_ < 0:
|
||||||
yield next(iters_secondary[i])
|
yield next(iters_secondary[i])
|
||||||
to_yield -= 1
|
to_yield -= 1
|
||||||
errors[i] += delta_primary
|
errors[i] += delta_primary
|
||||||
|
@ -1184,26 +1199,38 @@ def collapse(iterable, base_type=None, levels=None):
|
||||||
['a', ['b'], 'c', ['d']]
|
['a', ['b'], 'c', ['d']]
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
stack = deque()
|
||||||
|
# Add our first node group, treat the iterable as a single node
|
||||||
|
stack.appendleft((0, repeat(iterable, 1)))
|
||||||
|
|
||||||
def walk(node, level):
|
while stack:
|
||||||
if (
|
node_group = stack.popleft()
|
||||||
((levels is not None) and (level > levels))
|
level, nodes = node_group
|
||||||
or isinstance(node, (str, bytes))
|
|
||||||
or ((base_type is not None) and isinstance(node, base_type))
|
|
||||||
):
|
|
||||||
yield node
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
# Check if beyond max level
|
||||||
tree = iter(node)
|
if levels is not None and level > levels:
|
||||||
except TypeError:
|
yield from nodes
|
||||||
yield node
|
continue
|
||||||
return
|
|
||||||
else:
|
|
||||||
for child in tree:
|
|
||||||
yield from walk(child, level + 1)
|
|
||||||
|
|
||||||
yield from walk(iterable, 0)
|
for node in nodes:
|
||||||
|
# Check if done iterating
|
||||||
|
if isinstance(node, (str, bytes)) or (
|
||||||
|
(base_type is not None) and isinstance(node, base_type)
|
||||||
|
):
|
||||||
|
yield node
|
||||||
|
# Otherwise try to create child nodes
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
tree = iter(node)
|
||||||
|
except TypeError:
|
||||||
|
yield node
|
||||||
|
else:
|
||||||
|
# Save our current location
|
||||||
|
stack.appendleft(node_group)
|
||||||
|
# Append the new child node
|
||||||
|
stack.appendleft((level + 1, tree))
|
||||||
|
# Break to process child node
|
||||||
|
break
|
||||||
|
|
||||||
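`collapse` is now iterative, walking an explicit `deque` of `(level, nodes)` frames instead of recursing through nested `walk` generators. The yielded items are the same; one practical consequence (an inference about intent, not stated in the diff) is that very deep nesting no longer runs into the interpreter's recursion limit:
```
import sys
from more_itertools import collapse

deep = [1]
for _ in range(sys.getrecursionlimit() * 2):
    deep = [deep]  # nest far deeper than the default recursion limit

assert list(collapse(deep)) == [1]
```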
|
|
||||||
def side_effect(func, iterable, chunk_size=None, before=None, after=None):
|
def side_effect(func, iterable, chunk_size=None, before=None, after=None):
|
||||||
|
@ -1516,28 +1543,41 @@ def padded(iterable, fillvalue=None, n=None, next_multiple=False):
|
||||||
[1, 2, 3, '?', '?']
|
[1, 2, 3, '?', '?']
|
||||||
|
|
||||||
If *next_multiple* is ``True``, *fillvalue* will be emitted until the
|
If *next_multiple* is ``True``, *fillvalue* will be emitted until the
|
||||||
number of items emitted is a multiple of *n*::
|
number of items emitted is a multiple of *n*:
|
||||||
|
|
||||||
>>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
|
>>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
|
||||||
[1, 2, 3, 4, None, None]
|
[1, 2, 3, 4, None, None]
|
||||||
|
|
||||||
If *n* is ``None``, *fillvalue* will be emitted indefinitely.
|
If *n* is ``None``, *fillvalue* will be emitted indefinitely.
|
||||||
|
|
||||||
|
To create an *iterable* of exactly size *n*, you can truncate with
|
||||||
|
:func:`islice`.
|
||||||
|
|
||||||
|
>>> list(islice(padded([1, 2, 3], '?'), 5))
|
||||||
|
[1, 2, 3, '?', '?']
|
||||||
|
>>> list(islice(padded([1, 2, 3, 4, 5, 6, 7, 8], '?'), 5))
|
||||||
|
[1, 2, 3, 4, 5]
|
||||||
|
|
||||||
"""
|
"""
|
||||||
it = iter(iterable)
|
iterable = iter(iterable)
|
||||||
|
iterable_with_repeat = chain(iterable, repeat(fillvalue))
|
||||||
|
|
||||||
if n is None:
|
if n is None:
|
||||||
yield from chain(it, repeat(fillvalue))
|
return iterable_with_repeat
|
||||||
elif n < 1:
|
elif n < 1:
|
||||||
raise ValueError('n must be at least 1')
|
raise ValueError('n must be at least 1')
|
||||||
else:
|
elif next_multiple:
|
||||||
item_count = 0
|
|
||||||
for item in it:
|
|
||||||
yield item
|
|
||||||
item_count += 1
|
|
||||||
|
|
||||||
remaining = (n - item_count) % n if next_multiple else n - item_count
|
def slice_generator():
|
||||||
for _ in range(remaining):
|
for first in iterable:
|
||||||
yield fillvalue
|
yield (first,)
|
||||||
|
yield islice(iterable_with_repeat, n - 1)
|
||||||
|
|
||||||
|
# While elements exist produce slices of size n
|
||||||
|
return chain.from_iterable(slice_generator())
|
||||||
|
else:
|
||||||
|
# Ensure the first batch is at least size n then iterate
|
||||||
|
return chain(islice(iterable_with_repeat, n), iterable)
|
||||||
|
|
||||||
|
|
||||||
def repeat_each(iterable, n=2):
|
def repeat_each(iterable, n=2):
|
||||||
|
@ -1592,7 +1632,9 @@ def distribute(n, iterable):
|
||||||
[[1], [2], [3], [], []]
|
[[1], [2], [3], [], []]
|
||||||
|
|
||||||
This function uses :func:`itertools.tee` and may require significant
|
This function uses :func:`itertools.tee` and may require significant
|
||||||
storage. If you need the order items in the smaller iterables to match the
|
storage.
|
||||||
|
|
||||||
|
If you need the order items in the smaller iterables to match the
|
||||||
original iterable, see :func:`divide`.
|
original iterable, see :func:`divide`.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
@ -1840,9 +1882,9 @@ def divide(n, iterable):
|
||||||
>>> [list(c) for c in children]
|
>>> [list(c) for c in children]
|
||||||
[[1], [2], [3], [], []]
|
[[1], [2], [3], [], []]
|
||||||
|
|
||||||
This function will exhaust the iterable before returning and may require
|
This function will exhaust the iterable before returning.
|
||||||
significant storage. If order is not important, see :func:`distribute`,
|
If order is not important, see :func:`distribute`, which does not first
|
||||||
which does not first pull the iterable into memory.
|
pull the iterable into memory.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if n < 1:
|
if n < 1:
|
||||||
|
@ -3296,25 +3338,38 @@ def only(iterable, default=None, too_long=None):
|
||||||
return first_value
|
return first_value
|
||||||
|
|
||||||
|
|
||||||
class _IChunk:
|
def _ichunk(iterable, n):
|
||||||
def __init__(self, iterable, n):
|
cache = deque()
|
||||||
self._it = islice(iterable, n)
|
chunk = islice(iterable, n)
|
||||||
self._cache = deque()
|
|
||||||
|
|
||||||
def fill_cache(self):
|
def generator():
|
||||||
self._cache.extend(self._it)
|
while True:
|
||||||
|
if cache:
|
||||||
def __iter__(self):
|
yield cache.popleft()
|
||||||
return self
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
try:
|
|
||||||
return next(self._it)
|
|
||||||
except StopIteration:
|
|
||||||
if self._cache:
|
|
||||||
return self._cache.popleft()
|
|
||||||
else:
|
else:
|
||||||
raise
|
try:
|
||||||
|
item = next(chunk)
|
||||||
|
except StopIteration:
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
yield item
|
||||||
|
|
||||||
|
def materialize_next(n=1):
|
||||||
|
# if n not specified materialize everything
|
||||||
|
if n is None:
|
||||||
|
cache.extend(chunk)
|
||||||
|
return len(cache)
|
||||||
|
|
||||||
|
to_cache = n - len(cache)
|
||||||
|
|
||||||
|
# materialize up to n
|
||||||
|
if to_cache > 0:
|
||||||
|
cache.extend(islice(chunk, to_cache))
|
||||||
|
|
||||||
|
# return number materialized up to n
|
||||||
|
return min(n, len(cache))
|
||||||
|
|
||||||
|
return (generator(), materialize_next)
|
||||||
|
|
||||||
|
|
||||||
def ichunked(iterable, n):
|
def ichunked(iterable, n):
|
||||||
|
@ -3338,19 +3393,19 @@ def ichunked(iterable, n):
|
||||||
[8, 9, 10, 11]
|
[8, 9, 10, 11]
|
||||||
|
|
||||||
"""
|
"""
|
||||||
source = peekable(iter(iterable))
|
iterable = iter(iterable)
|
||||||
ichunk_marker = object()
|
|
||||||
while True:
|
while True:
|
||||||
|
# Create new chunk
|
||||||
|
chunk, materialize_next = _ichunk(iterable, n)
|
||||||
|
|
||||||
# Check to see whether we're at the end of the source iterable
|
# Check to see whether we're at the end of the source iterable
|
||||||
item = source.peek(ichunk_marker)
|
if not materialize_next():
|
||||||
if item is ichunk_marker:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
chunk = _IChunk(source, n)
|
|
||||||
yield chunk
|
yield chunk
|
||||||
|
|
||||||
# Advance the source iterable and fill previous chunk's cache
|
# Fill previous chunk's cache
|
||||||
chunk.fill_cache()
|
materialize_next(None)
|
||||||
|
|
||||||
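The `peekable`/`_IChunk` pair is replaced by `_ichunk`, which returns a chunk generator together with a `materialize_next` callback, but the contract of `ichunked` is unchanged: advancing to a later chunk caches the remainder of the earlier one, so chunks may be consumed in any order. A usage sketch:
```
from itertools import count
from more_itertools import ichunked

chunks = ichunked(count(), 4)
first, second = next(chunks), next(chunks)

# first's items were cached when second was created, so order of use is free.
assert list(second) == [4, 5, 6, 7]
assert list(first) == [0, 1, 2, 3]
```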
|
|
||||||
def iequals(*iterables):
|
def iequals(*iterables):
|
||||||
|
@ -3864,6 +3919,7 @@ def nth_permutation(iterable, r, index):
|
||||||
raise ValueError
|
raise ValueError
|
||||||
else:
|
else:
|
||||||
c = perm(n, r)
|
c = perm(n, r)
|
||||||
|
assert c > 0  # factorial(n)>0, and r<n so perm(n,r) is never zero
|
||||||
|
|
||||||
if index < 0:
|
if index < 0:
|
||||||
index += c
|
index += c
|
||||||
|
@ -3871,9 +3927,6 @@ def nth_permutation(iterable, r, index):
|
||||||
if not 0 <= index < c:
|
if not 0 <= index < c:
|
||||||
raise IndexError
|
raise IndexError
|
||||||
|
|
||||||
if c == 0:
|
|
||||||
return tuple()
|
|
||||||
|
|
||||||
result = [0] * r
|
result = [0] * r
|
||||||
q = index * factorial(n) // c if r < n else index
|
q = index * factorial(n) // c if r < n else index
|
||||||
for d in range(1, n + 1):
|
for d in range(1, n + 1):
|
||||||
|
@ -3946,6 +3999,12 @@ def value_chain(*args):
|
||||||
>>> list(value_chain('12', '34', ['56', '78']))
|
>>> list(value_chain('12', '34', ['56', '78']))
|
||||||
['12', '34', '56', '78']
|
['12', '34', '56', '78']
|
||||||
|
|
||||||
|
Pre- or postpend a single element to an iterable:
|
||||||
|
|
||||||
|
>>> list(value_chain(1, [2, 3, 4, 5, 6]))
|
||||||
|
[1, 2, 3, 4, 5, 6]
|
||||||
|
>>> list(value_chain([1, 2, 3, 4, 5], 6))
|
||||||
|
[1, 2, 3, 4, 5, 6]
|
||||||
|
|
||||||
Multiple levels of nesting are not flattened.
|
Multiple levels of nesting are not flattened.
|
||||||
|
|
||||||
|
@ -4154,53 +4213,41 @@ def chunked_even(iterable, n):
|
||||||
[[1, 2, 3], [4, 5, 6], [7]]
|
[[1, 2, 3], [4, 5, 6], [7]]
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
iterable = iter(iterable)
|
||||||
|
|
||||||
len_method = getattr(iterable, '__len__', None)
|
# Initialize a buffer to process the chunks while keeping
|
||||||
|
# some back to fill any underfilled chunks
|
||||||
|
min_buffer = (n - 1) * (n - 2)
|
||||||
|
buffer = list(islice(iterable, min_buffer))
|
||||||
|
|
||||||
if len_method is None:
|
# Append items until we have a completed chunk
|
||||||
return _chunked_even_online(iterable, n)
|
for _ in islice(map(buffer.append, iterable), n, None, n):
|
||||||
else:
|
yield buffer[:n]
|
||||||
return _chunked_even_finite(iterable, len_method(), n)
|
del buffer[:n]
|
||||||
|
|
||||||
|
# Check if any chunks need addition processing
|
||||||
def _chunked_even_online(iterable, n):
|
if not buffer:
|
||||||
buffer = []
|
|
||||||
maxbuf = n + (n - 2) * (n - 1)
|
|
||||||
for x in iterable:
|
|
||||||
buffer.append(x)
|
|
||||||
if len(buffer) == maxbuf:
|
|
||||||
yield buffer[:n]
|
|
||||||
buffer = buffer[n:]
|
|
||||||
yield from _chunked_even_finite(buffer, len(buffer), n)
|
|
||||||
|
|
||||||
|
|
||||||
def _chunked_even_finite(iterable, N, n):
|
|
||||||
if N < 1:
|
|
||||||
return
|
return
|
||||||
|
length = len(buffer)
|
||||||
|
|
||||||
# Lists are either size `full_size <= n` or `partial_size = full_size - 1`
|
# Chunks are either size `full_size <= n` or `partial_size = full_size - 1`
|
||||||
q, r = divmod(N, n)
|
q, r = divmod(length, n)
|
||||||
num_lists = q + (1 if r > 0 else 0)
|
num_lists = q + (1 if r > 0 else 0)
|
||||||
q, r = divmod(N, num_lists)
|
q, r = divmod(length, num_lists)
|
||||||
full_size = q + (1 if r > 0 else 0)
|
full_size = q + (1 if r > 0 else 0)
|
||||||
partial_size = full_size - 1
|
partial_size = full_size - 1
|
||||||
num_full = N - partial_size * num_lists
|
num_full = length - partial_size * num_lists
|
||||||
num_partial = num_lists - num_full
|
|
||||||
|
|
||||||
# Yield num_full lists of full_size
|
# Yield chunks of full size
|
||||||
partial_start_idx = num_full * full_size
|
partial_start_idx = num_full * full_size
|
||||||
if full_size > 0:
|
if full_size > 0:
|
||||||
for i in range(0, partial_start_idx, full_size):
|
for i in range(0, partial_start_idx, full_size):
|
||||||
yield list(islice(iterable, i, i + full_size))
|
yield buffer[i : i + full_size]
|
||||||
|
|
||||||
# Yield num_partial lists of partial_size
|
# Yield chunks of partial size
|
||||||
if partial_size > 0:
|
if partial_size > 0:
|
||||||
for i in range(
|
for i in range(partial_start_idx, length, partial_size):
|
||||||
partial_start_idx,
|
yield buffer[i : i + partial_size]
|
||||||
partial_start_idx + (num_partial * partial_size),
|
|
||||||
partial_size,
|
|
||||||
):
|
|
||||||
yield list(islice(iterable, i, i + partial_size))
|
|
||||||
|
|
||||||
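The buffered rewrite of `chunked_even` merges the previous online and finite code paths, but the contract is unchanged: chunks never exceed *n* items and their lengths differ by at most one. For comparison with plain `chunked` (mirroring the function's docstring):
```
from more_itertools import chunked, chunked_even

iterable = [1, 2, 3, 4, 5, 6, 7]
assert list(chunked(iterable, 3)) == [[1, 2, 3], [4, 5, 6], [7]]       # 3, 3, 1
assert list(chunked_even(iterable, 3)) == [[1, 2, 3], [4, 5], [6, 7]]  # 3, 2, 2
```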
|
|
||||||
def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
|
def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
|
||||||
|
@ -4419,12 +4466,12 @@ def minmax(iterable_or_value, *others, key=None, default=_marker):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
lo = hi = next(it)
|
lo = hi = next(it)
|
||||||
except StopIteration as e:
|
except StopIteration as exc:
|
||||||
if default is _marker:
|
if default is _marker:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'`minmax()` argument is an empty iterable. '
|
'`minmax()` argument is an empty iterable. '
|
||||||
'Provide a `default` value to suppress this error.'
|
'Provide a `default` value to suppress this error.'
|
||||||
) from e
|
) from exc
|
||||||
return default
|
return default
|
||||||
|
|
||||||
# Different branches depending on the presence of key. This saves a lot
|
# Different branches depending on the presence of key. This saves a lot
|
||||||
|
@ -4654,3 +4701,106 @@ def filter_map(func, iterable):
|
||||||
y = func(x)
|
y = func(x)
|
||||||
if y is not None:
|
if y is not None:
|
||||||
yield y
|
yield y
|
||||||
|
|
||||||
|
|
||||||
|
def powerset_of_sets(iterable):
|
||||||
|
"""Yields all possible subsets of the iterable.
|
||||||
|
|
||||||
|
>>> list(powerset_of_sets([1, 2, 3])) # doctest: +SKIP
|
||||||
|
[set(), {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
|
||||||
|
>>> list(powerset_of_sets([1, 1, 0])) # doctest: +SKIP
|
||||||
|
[set(), {1}, {0}, {0, 1}]
|
||||||
|
|
||||||
|
:func:`powerset_of_sets` takes care to minimize the number
|
||||||
|
of hash operations performed.
|
||||||
|
"""
|
||||||
|
sets = tuple(map(set, dict.fromkeys(map(frozenset, zip(iterable)))))
|
||||||
|
for r in range(len(sets) + 1):
|
||||||
|
yield from starmap(set().union, combinations(sets, r))
|
||||||
|
|
||||||
|
|
||||||
|
def join_mappings(**field_to_map):
|
||||||
|
"""
|
||||||
|
Joins multiple mappings together using their common keys.
|
||||||
|
|
||||||
|
>>> user_scores = {'elliot': 50, 'claris': 60}
|
||||||
|
>>> user_times = {'elliot': 30, 'claris': 40}
|
||||||
|
>>> join_mappings(score=user_scores, time=user_times)
|
||||||
|
{'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}}
|
||||||
|
"""
|
||||||
|
ret = defaultdict(dict)
|
||||||
|
|
||||||
|
for field_name, mapping in field_to_map.items():
|
||||||
|
for key, value in mapping.items():
|
||||||
|
ret[key][field_name] = value
|
||||||
|
|
||||||
|
return dict(ret)
|
||||||
|
|
||||||
|
|
||||||
|
def _complex_sumprod(v1, v2):
|
||||||
|
"""High precision sumprod() for complex numbers.
|
||||||
|
Used by :func:`dft` and :func:`idft`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
r1 = chain((p.real for p in v1), (-p.imag for p in v1))
|
||||||
|
r2 = chain((q.real for q in v2), (q.imag for q in v2))
|
||||||
|
i1 = chain((p.real for p in v1), (p.imag for p in v1))
|
||||||
|
i2 = chain((q.imag for q in v2), (q.real for q in v2))
|
||||||
|
return complex(_fsumprod(r1, r2), _fsumprod(i1, i2))
|
||||||
|
|
||||||
|
|
||||||
|
def dft(xarr):
|
||||||
|
"""Discrete Fourier Tranform. *xarr* is a sequence of complex numbers.
|
||||||
|
Yields the components of the corresponding transformed output vector.
|
||||||
|
|
||||||
|
>>> import cmath
|
||||||
|
>>> xarr = [1, 2-1j, -1j, -1+2j]
|
||||||
|
>>> Xarr = [2, -2-2j, -2j, 4+4j]
|
||||||
|
>>> all(map(cmath.isclose, dft(xarr), Xarr))
|
||||||
|
True
|
||||||
|
|
||||||
|
See :func:`idft` for the inverse Discrete Fourier Transform.
|
||||||
|
"""
|
||||||
|
N = len(xarr)
|
||||||
|
roots_of_unity = [e ** (n / N * tau * -1j) for n in range(N)]
|
||||||
|
for k in range(N):
|
||||||
|
coeffs = [roots_of_unity[k * n % N] for n in range(N)]
|
||||||
|
yield _complex_sumprod(xarr, coeffs)
|
||||||
|
|
||||||
|
|
||||||
|
def idft(Xarr):
|
||||||
|
"""Inverse Discrete Fourier Tranform. *Xarr* is a sequence of
|
||||||
|
complex numbers. Yields the components of the corresponding
|
||||||
|
inverse-transformed output vector.
|
||||||
|
|
||||||
|
>>> import cmath
|
||||||
|
>>> xarr = [1, 2-1j, -1j, -1+2j]
|
||||||
|
>>> Xarr = [2, -2-2j, -2j, 4+4j]
|
||||||
|
>>> all(map(cmath.isclose, idft(Xarr), xarr))
|
||||||
|
True
|
||||||
|
|
||||||
|
See :func:`dft` for the Discrete Fourier Transform.
|
||||||
|
"""
|
||||||
|
N = len(Xarr)
|
||||||
|
roots_of_unity = [e ** (n / N * tau * 1j) for n in range(N)]
|
||||||
|
for k in range(N):
|
||||||
|
coeffs = [roots_of_unity[k * n % N] for n in range(N)]
|
||||||
|
yield _complex_sumprod(Xarr, coeffs) / N
|
||||||
|
|
||||||
|
|
||||||
|
def doublestarmap(func, iterable):
|
||||||
|
"""Apply *func* to every item of *iterable* by dictionary unpacking
|
||||||
|
the item into *func*.
|
||||||
|
|
||||||
|
The difference between :func:`itertools.starmap` and :func:`doublestarmap`
|
||||||
|
parallels the distinction between ``func(*a)`` and ``func(**a)``.
|
||||||
|
|
||||||
|
>>> iterable = [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}]
|
||||||
|
>>> list(doublestarmap(lambda a, b: a + b, iterable))
|
||||||
|
[3, 100]
|
||||||
|
|
||||||
|
``TypeError`` will be raised if *func*'s signature doesn't match the
|
||||||
|
mapping contained in *iterable* or if *iterable* does not contain mappings.
|
||||||
|
"""
|
||||||
|
for item in iterable:
|
||||||
|
yield func(**item)
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Stubs for more_itertools.more"""
|
"""Stubs for more_itertools.more"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from types import TracebackType
|
from types import TracebackType
|
||||||
|
@ -9,8 +10,10 @@ from typing import (
|
||||||
ContextManager,
|
ContextManager,
|
||||||
Generic,
|
Generic,
|
||||||
Hashable,
|
Hashable,
|
||||||
|
Mapping,
|
||||||
Iterable,
|
Iterable,
|
||||||
Iterator,
|
Iterator,
|
||||||
|
Mapping,
|
||||||
overload,
|
overload,
|
||||||
Reversible,
|
Reversible,
|
||||||
Sequence,
|
Sequence,
|
||||||
|
@ -602,6 +605,7 @@ class countable(Generic[_T], Iterator[_T]):
|
||||||
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
||||||
def __iter__(self) -> countable[_T]: ...
|
def __iter__(self) -> countable[_T]: ...
|
||||||
def __next__(self) -> _T: ...
|
def __next__(self) -> _T: ...
|
||||||
|
items_seen: int
|
||||||
|
|
||||||
def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
|
def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
|
||||||
def zip_broadcast(
|
def zip_broadcast(
|
||||||
|
@ -693,3 +697,13 @@ def filter_map(
|
||||||
func: Callable[[_T], _V | None],
|
func: Callable[[_T], _V | None],
|
||||||
iterable: Iterable[_T],
|
iterable: Iterable[_T],
|
||||||
) -> Iterator[_V]: ...
|
) -> Iterator[_V]: ...
|
||||||
|
def powerset_of_sets(iterable: Iterable[_T]) -> Iterator[set[_T]]: ...
|
||||||
|
def join_mappings(
|
||||||
|
**field_to_map: Mapping[_T, _V]
|
||||||
|
) -> dict[_T, dict[str, _V]]: ...
|
||||||
|
def doublestarmap(
|
||||||
|
func: Callable[..., _T],
|
||||||
|
iterable: Iterable[Mapping[str, Any]],
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def dft(xarr: Sequence[complex]) -> Iterator[complex]: ...
|
||||||
|
def idft(Xarr: Sequence[complex]) -> Iterator[complex]: ...
|
||||||
|
|
|
@@ -7,6 +7,7 @@ Some backward-compatible usability improvements have been made.
 .. [1] http://docs.python.org/library/itertools.html#recipes
 
 """
+
 import math
 import operator
 
@@ -74,6 +75,7 @@ __all__ = [
     'totient',
     'transpose',
     'triplewise',
+    'unique',
     'unique_everseen',
     'unique_justseen',
 ]
@@ -198,7 +200,7 @@ def nth(iterable, n, default=None):
     return next(islice(iterable, n, None), default)
 
 
-def all_equal(iterable):
+def all_equal(iterable, key=None):
     """
     Returns ``True`` if all the elements are equal to each other.
 
@@ -207,9 +209,16 @@ def all_equal(iterable):
     >>> all_equal('aaab')
     False
 
+    A function that accepts a single argument and returns a transformed version
+    of each input item can be specified with *key*:
+
+    >>> all_equal('AaaA', key=str.casefold)
+    True
+    >>> all_equal([1, 2, 3], key=lambda x: x < 10)
+    True
+
     """
-    g = groupby(iterable)
-    return next(g, True) and not next(g, False)
+    return len(list(islice(groupby(iterable, key), 2))) <= 1
 
 
 def quantify(iterable, pred=bool):
@@ -410,16 +419,11 @@ def roundrobin(*iterables):
     iterables is small).
 
     """
-    # Recipe credited to George Sakkis
-    pending = len(iterables)
-    nexts = cycle(iter(it).__next__ for it in iterables)
-    while pending:
-        try:
-            for next in nexts:
-                yield next()
-        except StopIteration:
-            pending -= 1
-            nexts = cycle(islice(nexts, pending))
+    # Algorithm credited to George Sakkis
+    iterators = map(iter, iterables)
+    for num_active in range(len(iterables), 0, -1):
+        iterators = cycle(islice(iterators, num_active))
+        yield from map(next, iterators)
 
 
 def partition(pred, iterable):
@@ -458,16 +462,14 @@ def powerset(iterable):
 
     :func:`powerset` will operate on iterables that aren't :class:`set`
     instances, so repeated elements in the input will produce repeated elements
-    in the output. Use :func:`unique_everseen` on the input to avoid generating
-    duplicates:
+    in the output.
 
     >>> seq = [1, 1, 0]
     >>> list(powerset(seq))
     [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
-    >>> from more_itertools import unique_everseen
-    >>> list(powerset(unique_everseen(seq)))
-    [(), (1,), (0,), (1, 0)]
 
+    For a variant that efficiently yields actual :class:`set` instances, see
+    :func:`powerset_of_sets`.
     """
     s = list(iterable)
     return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
@@ -533,6 +535,25 @@ def unique_justseen(iterable, key=None):
     return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
 
 
+def unique(iterable, key=None, reverse=False):
+    """Yields unique elements in sorted order.
+
+    >>> list(unique([[1, 2], [3, 4], [1, 2]]))
+    [[1, 2], [3, 4]]
+
+    *key* and *reverse* are passed to :func:`sorted`.
+
+    >>> list(unique('ABBcCAD', str.casefold))
+    ['A', 'B', 'c', 'D']
+    >>> list(unique('ABBcCAD', str.casefold, reverse=True))
+    ['D', 'c', 'B', 'A']
+
+    The elements in *iterable* need not be hashable, but they must be
+    comparable for sorting to work.
+    """
+    return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key)
+
+
 def iter_except(func, exception, first=None):
     """Yields results from a function repeatedly until an exception is raised.
 
@@ -827,8 +848,6 @@ def iter_index(iterable, value, start=0, stop=None):
     """Yield the index of each place in *iterable* that *value* occurs,
     beginning with index *start* and ending before index *stop*.
 
-    See :func:`locate` for a more general means of finding the indexes
-    associated with particular values.
 
     >>> list(iter_index('AABCADEAF', 'A'))
     [0, 1, 4, 7]
@@ -836,6 +855,19 @@ def iter_index(iterable, value, start=0, stop=None):
     [1, 4, 7]
     >>> list(iter_index('AABCADEAF', 'A', 1, 7))  # stop index is not inclusive
     [1, 4]
+
+    The behavior for non-scalar *values* matches the built-in Python types.
+
+    >>> list(iter_index('ABCDABCD', 'AB'))
+    [0, 4]
+    >>> list(iter_index([0, 1, 2, 3, 0, 1, 2, 3], [0, 1]))
+    []
+    >>> list(iter_index([[0, 1], [2, 3], [0, 1], [2, 3]], [0, 1]))
+    [0, 2]
+
+    See :func:`locate` for a more general means of finding the indexes
+    associated with particular values.
+
     """
     seq_index = getattr(iterable, 'index', None)
     if seq_index is None:
@@ -1006,7 +1038,9 @@ def totient(n):
     >>> totient(12)
     4
     """
-    for p in unique_justseen(factor(n)):
+    # The itertools docs use unique_justseen instead of set; see
+    # https://github.com/more-itertools/more-itertools/issues/823
+    for p in set(factor(n)):
         n = n // p * (p - 1)
 
     return n
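A quick sketch of how the reworked recipes above behave, assuming the updated more_itertools is on the path (the outputs mirror the new doctests in the hunks):

from more_itertools import all_equal, unique

# all_equal now accepts a key function
print(all_equal('AaaA', key=str.casefold))     # True

# unique() sorts first, then drops adjacent duplicates, so unhashable
# items work as long as they are comparable
print(list(unique([[1, 2], [3, 4], [1, 2]])))  # [[1, 2], [3, 4]]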
@@ -1,4 +1,5 @@
 """Stubs for more_itertools.recipes"""
+
 from __future__ import annotations
 
 from typing import (
@@ -28,7 +29,9 @@ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
 def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
 @overload
 def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
-def all_equal(iterable: Iterable[_T]) -> bool: ...
+def all_equal(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> bool: ...
 def quantify(
     iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
 ) -> int: ...
@@ -58,6 +61,11 @@ def unique_everseen(
 def unique_justseen(
     iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
 ) -> Iterator[_T]: ...
+def unique(
+    iterable: Iterable[_T],
+    key: Callable[[_T], object] | None = ...,
+    reverse: bool = False,
+) -> Iterator[_T]: ...
 @overload
 def iter_except(
     func: Callable[[], _T],
@@ -3,14 +3,24 @@ Classes for calling functions a schedule. Has time zone support.
 
 For example, to run a job at 08:00 every morning in 'Asia/Calcutta':
 
+>>> from tests.compat.py38 import zoneinfo
 >>> job = lambda: print("time is now", datetime.datetime())
->>> time = datetime.time(8, tzinfo=pytz.timezone('Asia/Calcutta'))
+>>> time = datetime.time(8, tzinfo=zoneinfo.ZoneInfo('Asia/Calcutta'))
 >>> cmd = PeriodicCommandFixedDelay.daily_at(time, job)
 >>> sched = InvokeScheduler()
 >>> sched.add(cmd)
 >>> while True:  # doctest: +SKIP
 ...     sched.run_pending()
 ...     time.sleep(.1)
+
+By default, the scheduler uses timezone-aware times in UTC. A
+client may override the default behavior by overriding ``now``
+and ``from_timestamp`` functions.
+
+>>> now()
+datetime.datetime(...utc)
+>>> from_timestamp(1718723533.7685602)
+datetime.datetime(...utc)
 """
 
 import datetime
@@ -18,27 +28,7 @@ import numbers
 import abc
 import bisect
 
-import pytz
+from .utc import now, fromtimestamp as from_timestamp
 
 
-def now():
-    """
-    Provide the current timezone-aware datetime.
-
-    A client may override this function to change the default behavior,
-    such as to use local time or timezone-naïve times.
-    """
-    return datetime.datetime.now(pytz.utc)
-
-
-def from_timestamp(ts):
-    """
-    Convert a numeric timestamp to a timezone-aware datetime.
-
-    A client may override this function to change the default behavior,
-    such as to use local time or timezone-naïve times.
-    """
-    return datetime.datetime.fromtimestamp(ts, pytz.utc)
-
-
 class DelayedCommand(datetime.datetime):
@@ -106,18 +96,7 @@ class PeriodicCommand(DelayedCommand):
         """
         Add delay to self, localized
         """
-        return self._localize(self + self.delay)
+        return self + self.delay
 
-    @staticmethod
-    def _localize(dt):
-        """
-        Rely on pytz.localize to ensure new result honors DST.
-        """
-        try:
-            tz = dt.tzinfo
-            return tz.localize(dt.replace(tzinfo=None))
-        except AttributeError:
-            return dt
-
     def next(self):
         cmd = self.__class__.from_datetime(self._next_time())
@@ -127,9 +106,7 @@ class PeriodicCommand(DelayedCommand):
 
     def __setattr__(self, key, value):
         if key == 'delay' and not value > datetime.timedelta():
-            raise ValueError(
-                "A PeriodicCommand must have a positive, " "non-zero delay."
-            )
+            raise ValueError("A PeriodicCommand must have a positive, non-zero delay.")
         super().__setattr__(key, value)
 
 
@@ -172,7 +149,7 @@ class PeriodicCommandFixedDelay(PeriodicCommand):
             when -= daily
         while when < now():
             when += daily
-        return cls.at_time(cls._localize(when), daily, target)
+        return cls.at_time(when, daily, target)
 
 
 class Scheduler:
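The scheduler now leans on the standard library for time zones instead of pytz. A hedged sketch of the equivalent setup on Python 3.9+, where zoneinfo is in the standard library (the tests.compat.py38 import in the doctest is only a shim for older interpreters):

import datetime
from zoneinfo import ZoneInfo

# The aware time-of-day that the updated doctest builds with zoneinfo instead of pytz
at_eight = datetime.time(8, tzinfo=ZoneInfo('Asia/Calcutta'))
# PeriodicCommandFixedDelay.daily_at(at_eight, job) can consume such a value directly;
# no pytz.localize() step is needed any more, which is why _localize() was dropped above.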
@@ -21,6 +21,13 @@ datetime.timezone.utc
 
 >>> time(0, 0).tzinfo
 datetime.timezone.utc
+
+Now should be affected by freezegun.
+
+>>> freezer = getfixture('freezer')
+>>> freezer.move_to('1999-12-31 17:00:00 -0700')
+>>> print(now())
+2000-01-01 00:00:00+00:00
 """
 
 import datetime as std
@@ -30,7 +37,10 @@ import functools
 __all__ = ['now', 'fromtimestamp', 'datetime', 'time']
 
 
-now = functools.partial(std.datetime.now, std.timezone.utc)
+def now():
+    return std.datetime.now(std.timezone.utc)
+
+
 fromtimestamp = functools.partial(std.datetime.fromtimestamp, tz=std.timezone.utc)
 datetime = functools.partial(std.datetime, tzinfo=std.timezone.utc)
 time = functools.partial(std.time, tzinfo=std.timezone.utc)
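Turning now from a functools.partial into a plain function matters for the freezegun doctest added above: a partial binds the real datetime.now at import time, while a function body re-resolves it on every call, so a patched clock is picked up. A minimal illustration of the difference; the names below are only for illustration and are not part of the module:

import datetime as std
import functools

# Captures the original datetime.now once; later monkeypatching is not seen
now_partial = functools.partial(std.datetime.now, std.timezone.utc)

def now_function():
    # Looks up std.datetime.now on each call, so clock mockers can intercept it
    return std.datetime.now(std.timezone.utc)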
@@ -32,22 +32,24 @@ from typing import (
     Union,
 )
 from unittest.mock import Mock
+from weakref import WeakKeyDictionary
 
 try:
     import typing_extensions
 except ImportError:
     typing_extensions = None  # type: ignore[assignment]
 
+# Must use this because typing.is_typeddict does not recognize
+# TypedDict from typing_extensions, and as of version 4.12.0
+# typing_extensions.TypedDict is different from typing.TypedDict
+# on all versions.
+from typing_extensions import is_typeddict
+
 from ._config import ForwardRefPolicy
 from ._exceptions import TypeCheckError, TypeHintWarning
 from ._memo import TypeCheckMemo
 from ._utils import evaluate_forwardref, get_stacklevel, get_type_name, qualified_name
 
-if sys.version_info >= (3, 13):
-    from typing import is_typeddict
-else:
-    from typing_extensions import is_typeddict
-
 if sys.version_info >= (3, 11):
     from typing import (
         Annotated,
@@ -87,6 +89,9 @@ generic_alias_types: tuple[type, ...] = (type(List), type(List[Any]))
 if sys.version_info >= (3, 9):
     generic_alias_types += (types.GenericAlias,)
 
+protocol_check_cache: WeakKeyDictionary[
+    type[Any], dict[type[Any], TypeCheckError | None]
+] = WeakKeyDictionary()
 
 # Sentinel
 _missing = object()
@@ -649,19 +654,96 @@ def check_protocol(
     args: tuple[Any, ...],
     memo: TypeCheckMemo,
 ) -> None:
-    # TODO: implement proper compatibility checking and support non-runtime protocols
-    if getattr(origin_type, "_is_runtime_protocol", False):
-        if not isinstance(value, origin_type):
-            raise TypeCheckError(
-                f"is not compatible with the {origin_type.__qualname__} protocol"
+    subject: type[Any] = value if isclass(value) else type(value)
+
+    if subject in protocol_check_cache:
+        result_map = protocol_check_cache[subject]
+        if origin_type in result_map:
+            if exc := result_map[origin_type]:
+                raise exc
+            else:
+                return
+
+    # Collect a set of methods and non-method attributes present in the protocol
+    ignored_attrs = set(dir(typing.Protocol)) | {
+        "__annotations__",
+        "__non_callable_proto_members__",
+    }
+    expected_methods: dict[str, tuple[Any, Any]] = {}
+    expected_noncallable_members: dict[str, Any] = {}
+    for attrname in dir(origin_type):
+        # Skip attributes present in typing.Protocol
+        if attrname in ignored_attrs:
+            continue
+
+        member = getattr(origin_type, attrname)
+        if callable(member):
+            signature = inspect.signature(member)
+            argtypes = [
+                (p.annotation if p.annotation is not Parameter.empty else Any)
+                for p in signature.parameters.values()
+                if p.kind is not Parameter.KEYWORD_ONLY
+            ] or Ellipsis
+            return_annotation = (
+                signature.return_annotation
+                if signature.return_annotation is not Parameter.empty
+                else Any
             )
+            expected_methods[attrname] = argtypes, return_annotation
+        else:
+            expected_noncallable_members[attrname] = member
+
+    for attrname, annotation in typing.get_type_hints(origin_type).items():
+        expected_noncallable_members[attrname] = annotation
+
+    subject_annotations = typing.get_type_hints(subject)
+
+    # Check that all required methods are present and their signatures are compatible
+    result_map = protocol_check_cache.setdefault(subject, {})
+    try:
+        for attrname, callable_args in expected_methods.items():
+            try:
+                method = getattr(subject, attrname)
+            except AttributeError:
+                if attrname in subject_annotations:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because its {attrname!r} attribute is not a method"
+                    ) from None
+                else:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because it has no method named {attrname!r}"
+                    ) from None
+
+            if not callable(method):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} attribute is not a callable"
+                )
+
+            # TODO: raise exception on added keyword-only arguments without defaults
+            try:
+                check_callable(method, Callable, callable_args, memo)
+            except TypeCheckError as exc:
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} method {exc}"
+                ) from None
+
+        # Check that all required non-callable members are present
+        for attrname in expected_noncallable_members:
+            # TODO: implement assignability checks for non-callable members
+            if attrname not in subject_annotations and not hasattr(subject, attrname):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because it has no attribute named {attrname!r}"
+                )
+    except TypeCheckError as exc:
+        result_map[origin_type] = exc
+        raise
     else:
-        warnings.warn(
-            f"Typeguard cannot check the {origin_type.__qualname__} protocol because "
-            f"it is a non-runtime protocol. If you would like to type check this "
-            f"protocol, please use @typing.runtime_checkable",
-            stacklevel=get_stacklevel(),
-        )
+        result_map[origin_type] = None
 
 
 def check_byteslike(
@@ -852,7 +934,8 @@ def builtin_checker_lookup(
     elif is_typeddict(origin_type):
         return check_typed_dict
     elif isclass(origin_type) and issubclass(
-        origin_type, Tuple  # type: ignore[arg-type]
+        origin_type,
+        Tuple,  # type: ignore[arg-type]
     ):
         # NamedTuple
         return check_tuple
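The rewritten check_protocol above does structural member and signature checks and caches results per subject class, instead of warning on protocols that are not runtime-checkable. A hedged sketch of the user-facing effect, assuming typeguard 4.3 is installed:

from typing import Protocol
from typeguard import TypeCheckError, check_type


class Greeter(Protocol):
    def greet(self, name: str) -> str: ...


class Impl:
    def greet(self, name: str) -> str:
        return f"hello {name}"


check_type(Impl(), Greeter)          # passes: a compatible greet() method is present

try:
    check_type(object(), Greeter)    # fails: no greet() method at all
except TypeCheckError as exc:
    print(exc)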
@@ -2,21 +2,22 @@ from __future__ import annotations
 
 import sys
 import warnings
-from typing import Any, Literal
+from typing import TYPE_CHECKING, Any, Literal
 
-from pytest import Config, Parser
-
 from typeguard._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
 from typeguard._exceptions import InstrumentationWarning
 from typeguard._importhook import install_import_hook
 from typeguard._utils import qualified_name, resolve_reference
 
+if TYPE_CHECKING:
+    from pytest import Config, Parser
+
 
 def pytest_addoption(parser: Parser) -> None:
     def add_ini_option(
         opt_type: (
             Literal["string", "paths", "pathlist", "args", "linelist", "bool"] | None
-        )
+        ),
     ) -> None:
         parser.addini(
             group.options[-1].names()[0][2:],
@@ -28,7 +28,7 @@ def suppress_type_checks() -> ContextManager[None]: ...
 
 
 def suppress_type_checks(
-    func: Callable[P, T] | None = None
+    func: Callable[P, T] | None = None,
 ) -> Callable[P, T] | ContextManager[None]:
     """
     Temporarily suppress all type checking.
@@ -11,11 +11,21 @@ from weakref import WeakValueDictionary
 if TYPE_CHECKING:
     from ._memo import TypeCheckMemo
 
-if sys.version_info >= (3, 10):
+if sys.version_info >= (3, 13):
     from typing import get_args, get_origin
 
     def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
-        return forwardref._evaluate(memo.globals, memo.locals, frozenset())
+        return forwardref._evaluate(
+            memo.globals, memo.locals, type_params=(), recursive_guard=frozenset()
+        )
+
+elif sys.version_info >= (3, 10):
+    from typing import get_args, get_origin
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        return forwardref._evaluate(
+            memo.globals, memo.locals, recursive_guard=frozenset()
+        )
 
 else:
     from typing_extensions import get_args, get_origin
@@ -1,6 +1,7 @@
 import abc
 import collections
 import collections.abc
+import contextlib
 import functools
 import inspect
 import operator
@@ -116,6 +117,7 @@ __all__ = [
     'MutableMapping',
     'MutableSequence',
     'MutableSet',
+    'NoDefault',
     'Optional',
     'Pattern',
     'Reversible',
@@ -134,6 +136,7 @@ __all__ = [
 # for backward compatibility
 PEP_560 = True
 GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
 
 # The functions below are modified copies of typing internal helpers.
 # They are needed by _ProtocolMeta and they provide support for PEP 646.
@@ -406,17 +409,96 @@ Coroutine = typing.Coroutine
 AsyncIterable = typing.AsyncIterable
 AsyncIterator = typing.AsyncIterator
 Deque = typing.Deque
-ContextManager = typing.ContextManager
-AsyncContextManager = typing.AsyncContextManager
 DefaultDict = typing.DefaultDict
 OrderedDict = typing.OrderedDict
 Counter = typing.Counter
 ChainMap = typing.ChainMap
-AsyncGenerator = typing.AsyncGenerator
 Text = typing.Text
 TYPE_CHECKING = typing.TYPE_CHECKING
 
 
+if sys.version_info >= (3, 13, 0, "beta"):
+    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+    def _is_dunder(attr):
+        return attr.startswith('__') and attr.endswith('__')
+
+    # Python <3.9 doesn't have typing._SpecialGenericAlias
+    _special_generic_alias_base = getattr(
+        typing, "_SpecialGenericAlias", typing._GenericAlias
+    )
+
+    class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+        def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                self.__origin__ = origin
+                self._nparams = nparams
+                super().__init__(origin, nparams, special=True, inst=inst, name=name)
+            else:
+                # Python >= 3.9
+                super().__init__(origin, nparams, inst=inst, name=name)
+            self._defaults = defaults
+
+        def __setattr__(self, attr, val):
+            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                allowed_attrs.add("__origin__")
+            if _is_dunder(attr) or attr in allowed_attrs:
+                object.__setattr__(self, attr, val)
+            else:
+                setattr(self.__origin__, attr, val)
+
+        @typing._tp_cache
+        def __getitem__(self, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            msg = "Parameters to generic types must be types."
+            params = tuple(typing._type_check(p, msg) for p in params)
+            if (
+                self._defaults
+                and len(params) < self._nparams
+                and len(params) + len(self._defaults) >= self._nparams
+            ):
+                params = (*params, *self._defaults[len(params) - self._nparams:])
+            actual_len = len(params)
+
+            if actual_len != self._nparams:
+                if self._defaults:
+                    expected = f"at least {self._nparams - len(self._defaults)}"
+                else:
+                    expected = str(self._nparams)
+                if not self._nparams:
+                    raise TypeError(f"{self} is not a generic class")
+                raise TypeError(
+                    f"Too {'many' if actual_len > self._nparams else 'few'}"
+                    f" arguments for {self};"
+                    f" actual {actual_len}, expected {expected}"
+                )
+            return self.copy_with(params)
+
+    _NoneType = type(None)
+    Generator = _SpecialGenericAlias(
+        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+    )
+    AsyncGenerator = _SpecialGenericAlias(
+        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+    )
+    ContextManager = _SpecialGenericAlias(
+        contextlib.AbstractContextManager,
+        2,
+        name="ContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+    AsyncContextManager = _SpecialGenericAlias(
+        contextlib.AbstractAsyncContextManager,
+        2,
+        name="AsyncContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+
+
 _PROTO_ALLOWLIST = {
     'collections.abc': [
         'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
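With the backported _SpecialGenericAlias above, Generator and friends gain PEP 696-style parameter defaults, so trailing type arguments can be omitted. A hedged sketch against typing_extensions 4.12:

from typing_extensions import AsyncGenerator, Generator

# Send and return types now default to None, matching newer CPython behaviour
CounterGen = Generator[int]        # roughly equivalent to Generator[int, None, None]
ByteStream = AsyncGenerator[bytes]  # roughly equivalent to AsyncGenerator[bytes, None]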
@@ -427,23 +509,11 @@ _PROTO_ALLOWLIST = {
 }
 
 
-_EXCLUDED_ATTRS = {
-    "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol",
-    "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__",
-    "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__",
-    "__subclasshook__", "__orig_class__", "__init__", "__new__",
-    "__protocol_attrs__", "__non_callable_proto_members__",
-    "__match_args__",
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+    "__final__",
 }
 
-if sys.version_info >= (3, 9):
-    _EXCLUDED_ATTRS.add("__class_getitem__")
-
-if sys.version_info >= (3, 12):
-    _EXCLUDED_ATTRS.add("__type_params__")
-
-_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS)
-
 
 def _get_protocol_attrs(cls):
     attrs = set()
@@ -669,13 +739,18 @@ else:
         not their type signatures!
         """
         if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
-            raise TypeError('@runtime_checkable can be only applied to protocol classes,'
-                            ' got %r' % cls)
+            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+                            f' got {cls!r}')
         cls._is_runtime_protocol = True
 
-        # Only execute the following block if it's a typing_extensions.Protocol class.
-        # typing.Protocol classes don't need it.
-        if isinstance(cls, _ProtocolMeta):
+        # typing.Protocol classes on <=3.11 break if we execute this block,
+        # because typing.Protocol classes on <=3.11 don't have a
+        # `__protocol_attrs__` attribute, and this block relies on the
+        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+        # break if we *don't* execute this block, because *they* assume that all
+        # protocol classes have a `__non_callable_proto_members__` attribute
+        # (which this block sets)
+        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
             # PEP 544 prohibits using issubclass()
             # with protocols that have non-method members.
             # See gh-113320 for why we compute this attribute here,
@@ -867,7 +942,13 @@ else:
             tp_dict.__orig_bases__ = bases
 
             annotations = {}
-            own_annotations = ns.get('__annotations__', {})
+            if "__annotations__" in ns:
+                own_annotations = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                own_annotations = ns["__annotate__"](1)
+            else:
+                own_annotations = {}
             msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
             if _TAKES_MODULE:
                 own_annotations = {
@@ -1190,7 +1271,7 @@ else:
 
         def __reduce__(self):
             return operator.getitem, (
-                Annotated, (self.__origin__,) + self.__metadata__
+                Annotated, (self.__origin__, *self.__metadata__)
             )
 
         def __eq__(self, other):
@@ -1316,7 +1397,7 @@ else:
             get_args(Callable[[], T][int]) == ([], int)
         """
         if isinstance(tp, _AnnotatedAlias):
-            return (tp.__origin__,) + tp.__metadata__
+            return (tp.__origin__, *tp.__metadata__)
         if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
             if getattr(tp, "_special", False):
                 return ()
@@ -1362,17 +1443,37 @@ else:
     )
 
 
+if hasattr(typing, "NoDefault"):
+    NoDefault = typing.NoDefault
+else:
+    class NoDefaultTypeMeta(type):
+        def __setattr__(cls, attr, value):
+            # TypeError is consistent with the behavior of NoneType
+            raise TypeError(
+                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+            )
+
+    class NoDefaultType(metaclass=NoDefaultTypeMeta):
+        """The type of the NoDefault singleton."""
+
+        __slots__ = ()
+
+        def __new__(cls):
+            return globals().get("NoDefault") or object.__new__(cls)
+
+        def __repr__(self):
+            return "typing_extensions.NoDefault"
+
+        def __reduce__(self):
+            return "NoDefault"
+
+    NoDefault = NoDefaultType()
+    del NoDefaultType, NoDefaultTypeMeta
+
+
 def _set_default(type_param, default):
-    if isinstance(default, (tuple, list)):
-        type_param.__default__ = tuple((typing._type_check(d, "Default must be a type")
-                                        for d in default))
-    elif default != _marker:
-        if isinstance(type_param, ParamSpec) and default is ...:  # ... not valid <3.11
-            type_param.__default__ = default
-        else:
-            type_param.__default__ = typing._type_check(default, "Default must be a type")
-    else:
-        type_param.__default__ = None
+    type_param.has_default = lambda: default is not NoDefault
+    type_param.__default__ = default
 
 
 def _set_module(typevarlike):
@@ -1395,32 +1496,46 @@ class _TypeVarLikeMeta(type):
         return isinstance(__instance, cls._backported_typevarlike)
 
 
-# Add default and infer_variance parameters from PEP 696 and 695
-class TypeVar(metaclass=_TypeVarLikeMeta):
-    """Type variable."""
-
-    _backported_typevarlike = typing.TypeVar
-
-    def __new__(cls, name, *constraints, bound=None,
-                covariant=False, contravariant=False,
-                default=_marker, infer_variance=False):
-        if hasattr(typing, "TypeAliasType"):
-            # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
-            typevar = typing.TypeVar(name, *constraints, bound=bound,
-                                     covariant=covariant, contravariant=contravariant,
-                                     infer_variance=infer_variance)
-        else:
-            typevar = typing.TypeVar(name, *constraints, bound=bound,
-                                     covariant=covariant, contravariant=contravariant)
-            if infer_variance and (covariant or contravariant):
-                raise ValueError("Variance cannot be specified with infer_variance.")
-            typevar.__infer_variance__ = infer_variance
-        _set_default(typevar, default)
-        _set_module(typevar)
-        return typevar
-
-    def __init_subclass__(cls) -> None:
-        raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVar
+else:
+    # Add default and infer_variance parameters from PEP 696 and 695
+    class TypeVar(metaclass=_TypeVarLikeMeta):
+        """Type variable."""
+
+        _backported_typevarlike = typing.TypeVar
+
+        def __new__(cls, name, *constraints, bound=None,
+                    covariant=False, contravariant=False,
+                    default=NoDefault, infer_variance=False):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant,
+                                         infer_variance=infer_variance)
+            else:
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant)
+                if infer_variance and (covariant or contravariant):
+                    raise ValueError("Variance cannot be specified with infer_variance.")
+                typevar.__infer_variance__ = infer_variance
+
+            _set_default(typevar, default)
+            _set_module(typevar)
+
+            def _tvar_prepare_subst(alias, args):
+                if (
+                    typevar.has_default()
+                    and alias.__parameters__.index(typevar) == len(args)
+                ):
+                    args += (typevar.__default__,)
+                return args
+
+            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+            return typevar
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
 
 
 # Python 3.10+ has PEP 612
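The backported type variables now default to the public NoDefault sentinel instead of the private _marker, and grow a has_default() helper. A hedged sketch with typing_extensions 4.12:

from typing_extensions import NoDefault, TypeVar

T = TypeVar("T", default=int)
U = TypeVar("U")  # no default given

print(T.has_default(), T.__default__)               # True <class 'int'>
print(U.has_default(), U.__default__ is NoDefault)  # False True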
@@ -1485,8 +1600,12 @@ else:
             return NotImplemented
         return self.__origin__ == other.__origin__
 
+
+if _PEP_696_IMPLEMENTED:
+    from typing import ParamSpec
+
 # 3.10+
-if hasattr(typing, 'ParamSpec'):
+elif hasattr(typing, 'ParamSpec'):
 
     # Add default parameter - PEP 696
     class ParamSpec(metaclass=_TypeVarLikeMeta):
@@ -1496,7 +1615,7 @@ if hasattr(typing, 'ParamSpec'):
 
         def __new__(cls, name, *, bound=None,
                     covariant=False, contravariant=False,
-                    infer_variance=False, default=_marker):
+                    infer_variance=False, default=NoDefault):
             if hasattr(typing, "TypeAliasType"):
                 # PEP 695 implemented, can pass infer_variance to typing.TypeVar
                 paramspec = typing.ParamSpec(name, bound=bound,
@@ -1511,6 +1630,24 @@ if hasattr(typing, 'ParamSpec'):
 
             _set_default(paramspec, default)
             _set_module(paramspec)
+
+            def _paramspec_prepare_subst(alias, args):
+                params = alias.__parameters__
+                i = params.index(paramspec)
+                if i == len(args) and paramspec.has_default():
+                    args = [*args, paramspec.__default__]
+                if i >= len(args):
+                    raise TypeError(f"Too few arguments for {alias}")
+                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+                if len(params) == 1 and not typing._is_param_expr(args[0]):
+                    assert i == 0
+                    args = (args,)
+                # Convert lists to tuples to help other libraries cache the results.
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
             return paramspec
 
         def __init_subclass__(cls) -> None:
@@ -1579,8 +1716,8 @@ else:
             return ParamSpecKwargs(self)
 
         def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
-                     infer_variance=False, default=_marker):
-            super().__init__([self])
+                     infer_variance=False, default=NoDefault):
+            list.__init__(self, [self])
             self.__name__ = name
             self.__covariant__ = bool(covariant)
             self.__contravariant__ = bool(contravariant)
@@ -1674,7 +1811,7 @@ def _concatenate_getitem(self, parameters):
 # 3.10+
 if hasattr(typing, 'Concatenate'):
     Concatenate = typing.Concatenate
-    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias  # noqa: F811
+    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
 # 3.9
 elif sys.version_info[:2] >= (3, 9):
     @_ExtensionsSpecialForm
@@ -2209,6 +2346,17 @@ elif sys.version_info[:2] >= (3, 9):  # 3.9+
     class _UnpackAlias(typing._GenericAlias, _root=True):
         __class__ = typing.TypeVar
 
+        @property
+        def __typing_unpacked_tuple_args__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            arg, = self.__args__
+            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+                if arg.__origin__ is not tuple:
+                    raise TypeError("Unpack[...] must be used with a tuple type")
+                return arg.__args__
+            return None
+
     @_UnpackSpecialForm
     def Unpack(self, parameters):
         item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
@@ -2233,7 +2381,20 @@ else:  # 3.8
         return isinstance(obj, _UnpackAlias)
 
 
-if hasattr(typing, "TypeVarTuple"):  # 3.11+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"):  # 3.11+
 
+    def _unpack_args(*args):
+        newargs = []
+        for arg in args:
+            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+            if subargs is not None and not (subargs and subargs[-1] is ...):
+                newargs.extend(subargs)
+            else:
+                newargs.append(arg)
+        return newargs
+
     # Add default parameter - PEP 696
     class TypeVarTuple(metaclass=_TypeVarLikeMeta):
@@ -2241,10 +2402,57 @@ if hasattr(typing, "TypeVarTuple"):  # 3.11+
 
         _backported_typevarlike = typing.TypeVarTuple
 
-        def __new__(cls, name, *, default=_marker):
+        def __new__(cls, name, *, default=NoDefault):
             tvt = typing.TypeVarTuple(name)
             _set_default(tvt, default)
             _set_module(tvt)
+
+            def _typevartuple_prepare_subst(alias, args):
+                params = alias.__parameters__
+                typevartuple_index = params.index(tvt)
+                for param in params[typevartuple_index + 1:]:
+                    if isinstance(param, TypeVarTuple):
+                        raise TypeError(
+                            f"More than one TypeVarTuple parameter in {alias}"
+                        )
+
+                alen = len(args)
+                plen = len(params)
+                left = typevartuple_index
+                right = plen - typevartuple_index - 1
+                var_tuple_index = None
+                fillarg = None
+                for k, arg in enumerate(args):
+                    if not isinstance(arg, type):
+                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
+                            if var_tuple_index is not None:
+                                raise TypeError(
+                                    "More than one unpacked "
+                                    "arbitrary-length tuple argument"
+                                )
+                            var_tuple_index = k
+                            fillarg = subargs[0]
+                if var_tuple_index is not None:
+                    left = min(left, var_tuple_index)
+                    right = min(right, alen - var_tuple_index - 1)
+                elif left + right > alen:
+                    raise TypeError(f"Too few arguments for {alias};"
+                                    f" actual {alen}, expected at least {plen - 1}")
+                if left == alen - right and tvt.has_default():
+                    replacement = _unpack_args(tvt.__default__)
+                else:
+                    replacement = args[left: alen - right]
+
+                return (
+                    *args[:left],
+                    *([fillarg] * (typevartuple_index - left)),
+                    replacement,
+                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+                    *args[alen - right:],
+                )
+
+            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
             return tvt
 
         def __init_subclass__(self, *args, **kwds):
@@ -2301,7 +2509,7 @@ else:  # <=3.10
         def __iter__(self):
             yield self.__unpacked__
 
-        def __init__(self, name, *, default=_marker):
+        def __init__(self, name, *, default=NoDefault):
             self.__name__ = name
             _DefaultMixin.__init__(self, default)
 
@@ -2352,6 +2560,12 @@ else:  # <=3.10
         return obj
 
 
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
+    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else:  # <=3.10
+    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
 if hasattr(typing, "assert_never"):  # 3.11+
     assert_never = typing.assert_never
 else:  # <=3.10
@@ -2375,7 +2589,10 @@ else:  # <=3.10
         At runtime, this throws an exception when called.
 
         """
-        raise AssertionError("Expected code to be unreachable")
+        value = repr(arg)
+        if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+            value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+        raise AssertionError(f"Expected code to be unreachable, but got: {value}")
 
 
 if sys.version_info >= (3, 12):  # 3.12+
@@ -2677,11 +2894,14 @@ if not hasattr(typing, "TypeVarTuple"):
             if alen < elen:
                 # since we validate TypeVarLike default in _collect_type_vars
                 # or _collect_parameters we can safely check parameters[alen]
-                if getattr(parameters[alen], '__default__', None) is not None:
+                if (
+                    getattr(parameters[alen], '__default__', NoDefault)
+                    is not NoDefault
+                ):
                     return
 
-                num_default_tv = sum(getattr(p, '__default__', None)
-                                     is not None for p in parameters)
+                num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                     is not NoDefault for p in parameters)
 
                 elen -= num_default_tv
 
@@ -2711,11 +2931,14 @@ else:
             if alen < elen:
                 # since we validate TypeVarLike default in _collect_type_vars
                 # or _collect_parameters we can safely check parameters[alen]
-                if getattr(parameters[alen], '__default__', None) is not None:
+                if (
+                    getattr(parameters[alen], '__default__', NoDefault)
+                    is not NoDefault
+                ):
                     return
 
-                num_default_tv = sum(getattr(p, '__default__', None)
-                                     is not None for p in parameters)
+                num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                     is not NoDefault for p in parameters)
 
                 elen -= num_default_tv
 
@@ -2724,7 +2947,42 @@ else:
             raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
                             f" for {cls}; actual {alen}, expected {expect_val}")
 
-typing._check_generic = _check_generic
+if not _PEP_696_IMPLEMENTED:
+    typing._check_generic = _check_generic
+
+
+def _has_generic_or_protocol_as_origin() -> bool:
+    try:
+        frame = sys._getframe(2)
+    # - Catch AttributeError: not all Python implementations have sys._getframe()
+    # - Catch ValueError: maybe we're called from an unexpected module
+    #   and the call stack isn't deep enough
+    except (AttributeError, ValueError):
+        return False  # err on the side of leniency
+    else:
+        # If we somehow get invoked from outside typing.py,
+        # also err on the side of leniency
+        if frame.f_globals.get("__name__") != "typing":
+            return False
+        origin = frame.f_locals.get("origin")
+        # Cannot use "in" because origin may be an object with a buggy __eq__ that
+        # throws an error.
+        return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+    if get_origin(x) is not Unpack:
+        return False
+    args = get_args(x)
+    return (
+        bool(args)
+        and len(args) == 1
+        and type(args[0]) in _TYPEVARTUPLE_TYPES
+    )
+
 
 # Python 3.11+ _collect_type_vars was renamed to _collect_parameters
 if hasattr(typing, '_collect_type_vars'):
@@ -2737,19 +2995,29 @@ if hasattr(typing, '_collect_type_vars'):
         if typevar_types is None:
             typevar_types = typing.TypeVar
         tvars = []
-        # required TypeVarLike cannot appear after TypeVarLike with default
+        # A required TypeVarLike cannot appear after a TypeVarLike with a default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
         default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
         for t in types:
-            if (
-                isinstance(t, typevar_types) and
-                t not in tvars and
-                not _is_unpack(t)
-            ):
-                if getattr(t, '__default__', None) is not None:
-                    default_encountered = True
-                elif default_encountered:
-                    raise TypeError(f'Type parameter {t!r} without a default'
-                                    ' follows type parameter with a default')
+            if _is_unpacked_typevartuple(t):
+                type_var_tuple_encountered = True
+            elif isinstance(t, typevar_types) and t not in tvars:
+                if enforce_default_ordering:
+                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+                    if has_default:
+                        if type_var_tuple_encountered:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+
+                        default_encountered = True
+                    elif default_encountered:
+                        raise TypeError(f'Type parameter {t!r} without a default'
+                                        ' follows type parameter with a default')
 
                 tvars.append(t)
             if _should_collect_from_parameters(t):
@@ -2767,8 +3035,15 @@ else:
             assert _collect_parameters((T, Callable[P, T])) == (T, P)
         """
         parameters = []
-        # required TypeVarLike cannot appear after TypeVarLike with default
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
         default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
         for t in args:
             if isinstance(t, type):
                 # We don't want __parameters__ descriptor of a bare Python class.
@@ -2782,21 +3057,33 @@ else:
                             parameters.append(collected)
             elif hasattr(t, '__typing_subst__'):
                 if t not in parameters:
-                    if getattr(t, '__default__', None) is not None:
-                        default_encountered = True
-                    elif default_encountered:
-                        raise TypeError(f'Type parameter {t!r} without a default'
-                                        ' follows type parameter with a default')
+                    if enforce_default_ordering:
+                        has_default = (
+                            getattr(t, '__default__', NoDefault) is not NoDefault
+                        )
+                        if type_var_tuple_encountered and has_default:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+
+                        if has_default:
+                            default_encountered = True
+                        elif default_encountered:
+                            raise TypeError(f'Type parameter {t!r} without a default'
+                                            ' follows type parameter with a default')
 
                     parameters.append(t)
             else:
+                if _is_unpacked_typevartuple(t):
+                    type_var_tuple_encountered = True
                 for x in getattr(t, '__parameters__', ()):
                     if x not in parameters:
                         parameters.append(x)
 
         return tuple(parameters)
 
-    typing._collect_parameters = _collect_parameters
+    if not _PEP_696_IMPLEMENTED:
+        typing._collect_parameters = _collect_parameters
 
 # Backport typing.NamedTuple as it exists in Python 3.13.
 # In 3.11, the ability to define generic `NamedTuple`s was supported.
@@ -2830,7 +3117,13 @@ else:
                     raise TypeError(
                         'can only inherit from a NamedTuple type and Generic')
             bases = tuple(tuple if base is _NamedTuple else base for base in bases)
-            types = ns.get('__annotations__', {})
+            if "__annotations__" in ns:
+                types = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                types = ns["__annotate__"](1)
+            else:
+                types = {}
             default_names = []
             for field_name in types:
                 if field_name in ns:
@@ -2962,7 +3255,7 @@ else:
 if hasattr(collections.abc, "Buffer"):
     Buffer = collections.abc.Buffer
 else:
-    class Buffer(abc.ABC):
+    class Buffer(abc.ABC):  # noqa: B024
         """Base class for classes that implement the buffer protocol.
 
         The buffer protocol allows Python objects to expose a low-level
@@ -3289,6 +3582,23 @@ else:
             return self.documentation == other.documentation
 
 
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+    try:
+        import _socket
+    except ImportError:
+        pass
+    else:
+        _CAPI = getattr(_socket, "CAPI", None)
+        if _CAPI is not None:
+            _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+    CapsuleType = _CapsuleType
+    __all__.append("CapsuleType")
+
+
 # Aliases for items that have always been in typing.
 # Explicitly assign these (rather than using `from typing import *` at the top),
 # so that we get a CI error if one of these is deleted from typing.py
@@ -3302,7 +3612,6 @@ Container = typing.Container
 Dict = typing.Dict
 ForwardRef = typing.ForwardRef
 FrozenSet = typing.FrozenSet
-Generator = typing.Generator
 Generic = typing.Generic
 Hashable = typing.Hashable
 IO = typing.IO
@@ -5,7 +5,7 @@ beautifulsoup4==4.12.3
 bleach==6.1.0
 certifi==2024.6.2
 cheroot==10.0.1
-cherrypy==18.9.0
+cherrypy==18.10.0
 cloudinary==1.40.0
 distro==1.9.0
 dnspython==2.6.1