Update cherrypy==18.9.0

JonnyWong16 2024-03-24 17:55:12 -07:00
parent 2fc618c01f
commit 51196a7fb1
137 changed files with 44442 additions and 11582 deletions

@ -3,4 +3,4 @@
from .more import * # noqa
from .recipes import * # noqa
__version__ = '10.1.0'
__version__ = '10.2.0'

@ -19,7 +19,7 @@ from itertools import (
zip_longest,
product,
)
from math import exp, factorial, floor, log
from math import exp, factorial, floor, log, perm, comb
from queue import Empty, Queue
from random import random, randrange, uniform
from operator import itemgetter, mul, sub, gt, lt, ge, le
@ -68,8 +68,10 @@ __all__ = [
'divide',
'duplicates_everseen',
'duplicates_justseen',
'classify_unique',
'exactly_n',
'filter_except',
'filter_map',
'first',
'gray_product',
'groupby_transform',
@ -83,6 +85,7 @@ __all__ = [
'is_sorted',
'islice_extended',
'iterate',
'iter_suppress',
'last',
'locate',
'longest_common_prefix',
@ -198,15 +201,14 @@ def first(iterable, default=_marker):
``next(iter(iterable), default)``.
"""
try:
return next(iter(iterable))
except StopIteration as e:
if default is _marker:
raise ValueError(
'first() was called on an empty iterable, and no '
'default value was provided.'
) from e
return default
for item in iterable:
return item
if default is _marker:
raise ValueError(
'first() was called on an empty iterable, and no '
'default value was provided.'
)
return default
def last(iterable, default=_marker):
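
The rewritten first() above swaps the next(iter(...)) call and exception chaining for a plain for loop, but the calling contract is unchanged. A minimal check, assuming the updated package is importable as more_itertools:

from more_itertools import first

assert first([3, 2, 1]) == 3                      # first element wins
assert first((), default='missing') == 'missing'  # empty iterable falls back to the default
try:
    first(())                                     # empty iterable and no default
except ValueError:
    pass                                          # raises as documented
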
@ -582,6 +584,9 @@ def strictly_n(iterable, n, too_short=None, too_long=None):
>>> list(strictly_n(iterable, n))
['a', 'b', 'c', 'd']
Note that the returned iterable must be consumed in order for the check to
be made.
By default, *too_short* and *too_long* are functions that raise
``ValueError``.
@ -919,7 +924,7 @@ def substrings_indexes(seq, reverse=False):
class bucket:
"""Wrap *iterable* and return an object that buckets it iterable into
"""Wrap *iterable* and return an object that buckets the iterable into
child iterables based on a *key* function.
>>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
@ -3222,6 +3227,8 @@ class time_limited:
stops if the time elapsed is greater than *limit_seconds*. If your time
limit is 1 second, but it takes 2 seconds to generate the first item from
the iterable, the function will run for 2 seconds and not yield anything.
As a special case, when *limit_seconds* is zero, the iterator never
returns anything.
"""
@ -3237,6 +3244,9 @@ class time_limited:
return self
def __next__(self):
if self.limit_seconds == 0:
self.timed_out = True
raise StopIteration
item = next(self._iterable)
if monotonic() - self._start_time > self.limit_seconds:
self.timed_out = True
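
The limit_seconds == 0 special case above makes time_limited short-circuit before pulling anything from the wrapped iterable. A sketch of the resulting behavior, assuming more_itertools 10.2.0:

from itertools import count
from more_itertools import time_limited

it = time_limited(0, count())
assert list(it) == []        # nothing is yielded when the limit is zero
assert it.timed_out is True  # the iterator still marks itself as timed out
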
@ -3356,7 +3366,7 @@ def iequals(*iterables):
>>> iequals("abc", "acb")
False
Not to be confused with :func:`all_equals`, which checks whether all
Not to be confused with :func:`all_equal`, which checks whether all
elements of iterable are equal to each other.
"""
@ -3853,7 +3863,7 @@ def nth_permutation(iterable, r, index):
elif not 0 <= r < n:
raise ValueError
else:
c = factorial(n) // factorial(n - r)
c = perm(n, r)
if index < 0:
index += c
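
math.perm and math.comb (available since Python 3.8) compute exactly the factorial ratios they replace in this and the following hunks, just without building the large intermediate factorials:

from math import comb, factorial, perm

n, r = 7, 3
assert perm(n, r) == factorial(n) // factorial(n - r)
assert comb(n + r - 1, r) == factorial(n + r - 1) // (factorial(r) * factorial(n - 1))
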
@ -3898,7 +3908,7 @@ def nth_combination_with_replacement(iterable, r, index):
if (r < 0) or (r > n):
raise ValueError
c = factorial(n + r - 1) // (factorial(r) * factorial(n - 1))
c = comb(n + r - 1, r)
if index < 0:
index += c
@ -3911,9 +3921,7 @@ def nth_combination_with_replacement(iterable, r, index):
while r:
r -= 1
while n >= 0:
num_combs = factorial(n + r - 1) // (
factorial(r) * factorial(n - 1)
)
num_combs = comb(n + r - 1, r)
if index < num_combs:
break
n -= 1
@ -4015,9 +4023,9 @@ def combination_index(element, iterable):
for i, j in enumerate(reversed(indexes), start=1):
j = n - j
if i <= j:
index += factorial(j) // (factorial(i) * factorial(j - i))
index += comb(j, i)
return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
return comb(n + 1, k + 1) - index
def combination_with_replacement_index(element, iterable):
@ -4057,7 +4065,7 @@ def combination_with_replacement_index(element, iterable):
break
else:
raise ValueError(
'element is not a combination with replacment of iterable'
'element is not a combination with replacement of iterable'
)
n = len(pool)
@ -4066,11 +4074,13 @@ def combination_with_replacement_index(element, iterable):
occupations[p] += 1
index = 0
cumulative_sum = 0
for k in range(1, n):
j = l + n - 1 - k - sum(occupations[:k])
cumulative_sum += occupations[k - 1]
j = l + n - 1 - k - cumulative_sum
i = n - k
if i <= j:
index += factorial(j) // (factorial(i) * factorial(j - i))
index += comb(j, i)
return index
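
The cumulative_sum variable introduced here is a running prefix sum, so each pass avoids re-summing occupations[:k]. A small check of that equivalence with a made-up occupations list:

occupations = [2, 0, 1, 3]  # hypothetical values, for illustration only
cumulative_sum = 0
for k in range(1, len(occupations)):
    cumulative_sum += occupations[k - 1]
    assert cumulative_sum == sum(occupations[:k])
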
@ -4296,7 +4306,7 @@ def duplicates_everseen(iterable, key=None):
>>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
This function is analagous to :func:`unique_everseen` and is subject to
This function is analogous to :func:`unique_everseen` and is subject to
the same performance considerations.
"""
@ -4326,12 +4336,54 @@ def duplicates_justseen(iterable, key=None):
>>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
This function is analagous to :func:`unique_justseen`.
This function is analogous to :func:`unique_justseen`.
"""
return flatten(g for _, g in groupby(iterable, key) for _ in g)
def classify_unique(iterable, key=None):
"""Classify each element in terms of its uniqueness.
For each element in the input iterable, return a 3-tuple consisting of:
1. The element itself
2. ``False`` if the element is equal to the one preceding it in the input,
``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
3. ``False`` if this element has been seen anywhere in the input before,
``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
>>> list(classify_unique('otto')) # doctest: +NORMALIZE_WHITESPACE
[('o', True, True),
('t', True, True),
('t', False, False),
('o', True, False)]
This function is analogous to :func:`unique_everseen` and is subject to
the same performance considerations.
"""
seen_set = set()
seen_list = []
use_key = key is not None
previous = None
for i, element in enumerate(iterable):
k = key(element) if use_key else element
is_unique_justseen = not i or previous != k
previous = k
is_unique_everseen = False
try:
if k not in seen_set:
seen_set.add(k)
is_unique_everseen = True
except TypeError:
if k not in seen_list:
seen_list.append(k)
is_unique_everseen = True
yield element, is_unique_justseen, is_unique_everseen
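
A usage sketch for the new classify_unique with a key function, assuming more_itertools 10.2.0; comparisons use the transformed values while the original elements are yielded:

from more_itertools import classify_unique

assert list(classify_unique('AaaB', key=str.lower)) == [
    ('A', True, True),    # new both just-seen and ever-seen
    ('a', False, False),  # equal to the previous key and already seen
    ('a', False, False),
    ('B', True, True),
]
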
def minmax(iterable_or_value, *others, key=None, default=_marker):
"""Returns both the smallest and largest items in an iterable
or the largest of two or more arguments.
@ -4529,10 +4581,8 @@ def takewhile_inclusive(predicate, iterable):
:func:`takewhile` would return ``[1, 4]``.
"""
for x in iterable:
if predicate(x):
yield x
else:
yield x
yield x
if not predicate(x):
break
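
The simplified takewhile_inclusive yields unconditionally and only then checks the predicate, so the first failing element is still included. A quick check, assuming more_itertools is importable:

from more_itertools import takewhile_inclusive

assert list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1])) == [1, 4, 6]
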
@ -4567,3 +4617,40 @@ def outer_product(func, xs, ys, *args, **kwargs):
starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
n=len(ys),
)
def iter_suppress(iterable, *exceptions):
"""Yield each of the items from *iterable*. If the iteration raises one of
the specified *exceptions*, that exception will be suppressed and iteration
will stop.
>>> from itertools import chain
>>> def breaks_at_five(x):
... while True:
... if x >= 5:
... raise RuntimeError
... yield x
... x += 1
>>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError)
>>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError)
>>> list(chain(it_1, it_2))
[1, 2, 3, 4, 2, 3, 4]
"""
try:
yield from iterable
except exceptions:
return
def filter_map(func, iterable):
"""Apply *func* to every element of *iterable*, yielding only those which
are not ``None``.
>>> elems = ['1', 'a', '2', 'b', '3']
>>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
[1, 2, 3]
"""
for x in iterable:
y = func(x)
if y is not None:
yield y
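
A small pipeline combining the two new helpers, assuming more_itertools 10.2.0: keep the items that parse, and stop quietly when the underlying source fails.

from more_itertools import filter_map, iter_suppress

def read_tokens():
    # Hypothetical source that fails partway through iteration.
    yield '1'
    yield 'x'
    yield '2'
    raise OSError('stream closed')

safe = iter_suppress(read_tokens(), OSError)  # swallow the OSError and stop
numbers = filter_map(lambda s: int(s) if s.isdigit() else None, safe)
assert list(numbers) == [1, 2]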

@ -29,7 +29,7 @@ _U = TypeVar('_U')
_V = TypeVar('_V')
_W = TypeVar('_W')
_T_co = TypeVar('_T_co', covariant=True)
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
_Raisable = BaseException | Type[BaseException]
@type_check_only
@ -74,7 +74,7 @@ class peekable(Generic[_T], Iterator[_T]):
def __getitem__(self, index: slice) -> list[_T]: ...
def consumer(func: _GenFn) -> _GenFn: ...
def ilen(iterable: Iterable[object]) -> int: ...
def ilen(iterable: Iterable[_T]) -> int: ...
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
def with_iter(
context_manager: ContextManager[Iterable[_T]],
@ -116,7 +116,7 @@ class bucket(Generic[_T, _U], Container[_U]):
self,
iterable: Iterable[_T],
key: Callable[[_T], _U],
validator: Callable[[object], object] | None = ...,
validator: Callable[[_U], object] | None = ...,
) -> None: ...
def __contains__(self, value: object) -> bool: ...
def __iter__(self) -> Iterator[_U]: ...
@ -383,7 +383,7 @@ def mark_ends(
iterable: Iterable[_T],
) -> Iterable[tuple[bool, bool, _T]]: ...
def locate(
iterable: Iterable[object],
iterable: Iterable[_T],
pred: Callable[..., Any] = ...,
window_size: int | None = ...,
) -> Iterator[int]: ...
@ -618,6 +618,9 @@ def duplicates_everseen(
def duplicates_justseen(
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
) -> Iterator[_T]: ...
def classify_unique(
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
) -> Iterator[tuple[_T, bool, bool]]: ...
class _SupportsLessThan(Protocol):
def __lt__(self, __other: Any) -> bool: ...
@ -662,9 +665,9 @@ def minmax(
def longest_common_prefix(
iterables: Iterable[Iterable[_T]],
) -> Iterator[_T]: ...
def iequals(*iterables: Iterable[object]) -> bool: ...
def iequals(*iterables: Iterable[Any]) -> bool: ...
def constrained_batches(
iterable: Iterable[object],
iterable: Iterable[_T],
max_size: int,
max_count: int | None = ...,
get_len: Callable[[_T], object] = ...,
@ -682,3 +685,11 @@ def outer_product(
*args: Any,
**kwargs: Any,
) -> Iterator[tuple[_V, ...]]: ...
def iter_suppress(
iterable: Iterable[_T],
*exceptions: Type[BaseException],
) -> Iterator[_T]: ...
def filter_map(
func: Callable[[_T], _V | None],
iterable: Iterable[_T],
) -> Iterator[_V]: ...

@ -28,6 +28,7 @@ from itertools import (
zip_longest,
)
from random import randrange, sample, choice
from sys import hexversion
__all__ = [
'all_equal',
@ -56,6 +57,7 @@ __all__ = [
'powerset',
'prepend',
'quantify',
'reshape',
'random_combination_with_replacement',
'random_combination',
'random_permutation',
@ -69,6 +71,7 @@ __all__ = [
'tabulate',
'tail',
'take',
'totient',
'transpose',
'triplewise',
'unique_everseen',
@ -492,7 +495,7 @@ def unique_everseen(iterable, key=None):
>>> list(unique_everseen(iterable, key=tuple)) # Faster
[[1, 2], [2, 3]]
Similary, you may want to convert unhashable ``set`` objects with
Similarly, you may want to convert unhashable ``set`` objects with
``key=frozenset``. For ``dict`` objects,
``key=lambda x: frozenset(x.items())`` can be used.
@ -524,6 +527,9 @@ def unique_justseen(iterable, key=None):
['A', 'B', 'C', 'A', 'D']
"""
if key is None:
return map(operator.itemgetter(0), groupby(iterable))
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
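
The new key-is-None branch is a fast path only; results match the general path. For example, assuming more_itertools 10.2.0:

from more_itertools import unique_justseen

assert list(unique_justseen('AAAABBBCCDAABBB')) == ['A', 'B', 'C', 'D', 'A', 'B']
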
@ -817,35 +823,34 @@ def polynomial_from_roots(roots):
return list(reduce(convolve, factors, [1]))
def iter_index(iterable, value, start=0):
def iter_index(iterable, value, start=0, stop=None):
"""Yield the index of each place in *iterable* that *value* occurs,
beginning with index *start*.
beginning with index *start* and ending before index *stop*.
See :func:`locate` for a more general means of finding the indexes
associated with particular values.
>>> list(iter_index('AABCADEAF', 'A'))
[0, 1, 4, 7]
>>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive
[1, 4, 7]
>>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive
[1, 4]
"""
try:
seq_index = iterable.index
except AttributeError:
seq_index = getattr(iterable, 'index', None)
if seq_index is None:
# Slow path for general iterables
it = islice(iterable, start, None)
i = start - 1
try:
while True:
i = i + operator.indexOf(it, value) + 1
it = islice(iterable, start, stop)
for i, element in enumerate(it, start):
if element is value or element == value:
yield i
except ValueError:
pass
else:
# Fast path for sequences
stop = len(iterable) if stop is None else stop
i = start - 1
try:
while True:
i = seq_index(value, i + 1)
yield i
yield (i := seq_index(value, i + 1, stop))
except ValueError:
pass
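
The new stop parameter is honored on both branches of iter_index: the sequence fast path that calls .index() and the generic-iterable fallback. A quick check, assuming more_itertools 10.2.0:

from more_itertools import iter_index

assert list(iter_index('AABCADEAF', 'A', 1, 7)) == [1, 4]        # sequence fast path
assert list(iter_index(iter('AABCADEAF'), 'A', 1, 7)) == [1, 4]  # generic-iterable path
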
@ -856,47 +861,52 @@ def sieve(n):
>>> list(sieve(30))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
"""
if n > 2:
yield 2
start = 3
data = bytearray((0, 1)) * (n // 2)
data[:3] = 0, 0, 0
limit = math.isqrt(n) + 1
for p in compress(range(limit), data):
for p in iter_index(data, 1, start, limit):
yield from iter_index(data, 1, start, p * p)
data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
data[2] = 1
return iter_index(data, 1) if n > 2 else iter([])
start = p * p
yield from iter_index(data, 1, start)
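
The rewritten sieve keeps a 0/1 bytearray of candidates and yields primes by scanning it with iter_index rather than compress. A hand-built array (not sieve's internal state) just to illustrate that scanning step:

from more_itertools import iter_index

data = bytearray([0, 0, 1, 1, 0, 1, 0, 1, 0, 0])  # 1s placed at the primes below 10
assert list(iter_index(data, 1)) == [2, 3, 5, 7]
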
def _batched(iterable, n):
"""Batch data into lists of length *n*. The last batch may be shorter.
def _batched(iterable, n, *, strict=False):
"""Batch data into tuples of length *n*. If the number of items in
*iterable* is not divisible by *n*:
* The last batch will be shorter if *strict* is ``False``.
* :exc:`ValueError` will be raised if *strict* is ``True``.
>>> list(batched('ABCDEFG', 3))
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
On Python 3.12 and above, this is an alias for :func:`itertools.batched`.
On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
"""
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
while True:
batch = tuple(islice(it, n))
if not batch:
break
while batch := tuple(islice(it, n)):
if strict and len(batch) != n:
raise ValueError('batched(): incomplete batch')
yield batch
try:
if hexversion >= 0x30D00A2:
from itertools import batched as itertools_batched
except ImportError:
batched = _batched
else:
def batched(iterable, n):
return itertools_batched(iterable, n)
def batched(iterable, n, *, strict=False):
return itertools_batched(iterable, n, strict=strict)
else:
batched = _batched
batched.__doc__ = _batched.__doc__
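
A sketch of the new strict flag on batched, assuming more_itertools 10.2.0 (the flag mirrors itertools.batched on Python 3.13+):

from more_itertools import batched

assert list(batched('ABCDEF', 3, strict=True)) == [('A', 'B', 'C'), ('D', 'E', 'F')]
try:
    list(batched('ABCDEFG', 3, strict=True))  # 7 items: the last batch would be short
except ValueError:
    pass                                      # strict mode rejects the incomplete batch
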
def transpose(it):
"""Swap the rows and columns of the input.
"""Swap the rows and columns of the input matrix.
>>> list(transpose([(1, 2, 3), (11, 22, 33)]))
[(1, 11), (2, 22), (3, 33)]
@ -907,8 +917,20 @@ def transpose(it):
return _zip_strict(*it)
def reshape(matrix, cols):
"""Reshape the 2-D input *matrix* to have a column count given by *cols*.
>>> matrix = [(0, 1), (2, 3), (4, 5)]
>>> cols = 3
>>> list(reshape(matrix, cols))
[(0, 1, 2), (3, 4, 5)]
"""
return batched(chain.from_iterable(matrix), cols)
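
Since reshape flattens the rows and re-batches them, any column count works as long as it divides the total number of elements. For example, assuming more_itertools 10.2.0:

from more_itertools import reshape

matrix = [(0, 1), (2, 3), (4, 5)]
assert list(reshape(matrix, 1)) == [(0,), (1,), (2,), (3,), (4,), (5,)]
assert list(reshape(matrix, 6)) == [(0, 1, 2, 3, 4, 5)]
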
def matmul(m1, m2):
"""Multiply two matrices.
>>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
[(49, 80), (41, 60)]
@ -921,13 +943,12 @@ def matmul(m1, m2):
def factor(n):
"""Yield the prime factors of n.
>>> list(factor(360))
[2, 2, 2, 3, 3, 5]
"""
for prime in sieve(math.isqrt(n) + 1):
while True:
if n % prime:
break
while not n % prime:
yield prime
n //= prime
if n == 1:
@ -975,3 +996,17 @@ def polynomial_derivative(coefficients):
n = len(coefficients)
powers = reversed(range(1, n))
return list(map(operator.mul, coefficients, powers))
def totient(n):
"""Return the count of natural numbers up to *n* that are coprime with *n*.
>>> totient(9)
6
>>> totient(12)
4
"""
for p in unique_justseen(factor(n)):
n = n // p * (p - 1)
return n
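
The totient loop divides out each distinct prime p and multiplies by (p - 1). Working through totient(12) by hand, with factor(12) yielding 2, 2, 3:

n = 12
n = n // 2 * (2 - 1)  # distinct prime 2 -> 6
n = n // 3 * (3 - 1)  # distinct prime 3 -> 4
assert n == 4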

@ -14,6 +14,8 @@ from typing import (
# Type and type variable definitions
_T = TypeVar('_T')
_T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_U = TypeVar('_U')
def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
@ -26,14 +28,14 @@ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
@overload
def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
def all_equal(iterable: Iterable[object]) -> bool: ...
def all_equal(iterable: Iterable[_T]) -> bool: ...
def quantify(
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
) -> int: ...
def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
def repeatfunc(
func: Callable[..., _U], times: int | None = ..., *args: Any
@ -103,20 +105,24 @@ def sliding_window(
def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
def iter_index(
iterable: Iterable[object],
iterable: Iterable[_T],
value: Any,
start: int | None = ...,
stop: int | None = ...,
) -> Iterator[int]: ...
def sieve(n: int) -> Iterator[int]: ...
def batched(
iterable: Iterable[_T],
n: int,
iterable: Iterable[_T], n: int, *, strict: bool = False
) -> Iterator[tuple[_T]]: ...
def transpose(
it: Iterable[Iterable[_T]],
) -> Iterator[tuple[_T, ...]]: ...
def reshape(
matrix: Iterable[Iterable[_T]], cols: int
) -> Iterator[tuple[_T, ...]]: ...
def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
def factor(n: int) -> Iterator[int]: ...
def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
def sum_of_squares(it: Iterable[_T]) -> _T: ...
def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
def totient(n: int) -> int: ...