Bump tempora from 5.2.1 to 5.5.0 (#2111)

* Bump tempora from 5.2.1 to 5.5.0

Bumps [tempora](https://github.com/jaraco/tempora) from 5.2.1 to 5.5.0.
- [Release notes](https://github.com/jaraco/tempora/releases)
- [Changelog](https://github.com/jaraco/tempora/blob/main/NEWS.rst)
- [Commits](https://github.com/jaraco/tempora/compare/v5.2.1...v5.5.0)

---
updated-dependencies:
- dependency-name: tempora
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update tempora==5.5.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
dependabot[bot] 2023-08-23 21:43:27 -07:00 committed by GitHub
parent 9a196f3dca
commit 1e903b164b
10 changed files with 429 additions and 133 deletions

View file

@@ -1,9 +1,10 @@
-import functools
-import time
-import inspect
 import collections
-import types
+import functools
+import inspect
 import itertools
+import operator
+import time
+import types
 import warnings
 
 import more_itertools
@@ -183,8 +184,9 @@ def method_cache(
     # Support cache clear even before cache has been created.
     wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]
 
-    return (  # type: ignore[return-value]
-        _special_method_cache(method, cache_wrapper) or wrapper
+    return (
+        _special_method_cache(method, cache_wrapper)  # type: ignore[return-value]
+        or wrapper
     )
@@ -554,3 +556,51 @@ def except_(*exceptions, replace=None, use=None):
         return wrapper
 
     return decorate
+
+
+def identity(x):
+    return x
+
+
+def bypass_when(check, *, _op=identity):
+    """
+    Decorate a function to return its parameter when ``check``.
+
+    >>> bypassed = []  # False
+
+    >>> @bypass_when(bypassed)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> bypassed[:] = [object()]  # True
+    >>> double(2)
+    2
+    """
+
+    def decorate(func):
+        @functools.wraps(func)
+        def wrapper(param):
+            return param if _op(check) else func(param)
+
+        return wrapper
+
+    return decorate
+
+
+def bypass_unless(check):
+    """
+    Decorate a function to return its parameter unless ``check``.
+
+    >>> enabled = [object()]  # True
+
+    >>> @bypass_unless(enabled)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> del enabled[:]  # False
+    >>> double(2)
+    2
+    """
+    return bypass_when(check, _op=operator.not_)

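For reference, the two decorators added in the hunk above let a one-argument function be short-circuited by a mutable flag. A minimal sketch of their behaviour, taken from the new code itself (the `jaraco.functools` import path is an assumption based on the function names in this file, not something the diff states):

```python
# Sketch only: the import path is assumed; the semantics come from the hunk above.
from jaraco.functools import bypass_when

FLAG = []  # empty list is falsy, so the wrapped function runs normally

@bypass_when(FLAG)
def double(x):
    return x * 2

print(double(3))        # 6 while FLAG is falsy
FLAG.append(object())   # any truthy value enables the bypass
print(double(3))        # 3: the argument is returned untouched
```

`bypass_unless(check)` is simply `bypass_when(check, _op=operator.not_)`, i.e. the same behaviour with the condition inverted.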
View file

@@ -3,4 +3,4 @@
 from .more import *  # noqa
 from .recipes import *  # noqa
 
-__version__ = '9.1.0'
+__version__ = '10.1.0'

View file

@@ -2,7 +2,7 @@ import warnings
 from collections import Counter, defaultdict, deque, abc
 from collections.abc import Sequence
-from functools import partial, reduce, wraps
+from functools import cached_property, partial, reduce, wraps
 from heapq import heapify, heapreplace, heappop
 from itertools import (
     chain,
@@ -17,6 +17,7 @@ from itertools import (
     takewhile,
     tee,
     zip_longest,
+    product,
 )
 from math import exp, factorial, floor, log
 from queue import Empty, Queue
@@ -36,6 +37,7 @@ from .recipes import (
     take,
     unique_everseen,
     all_equal,
+    batched,
 )
 
 __all__ = [
@@ -53,6 +55,7 @@ __all__ = [
     'circular_shifts',
     'collapse',
     'combination_index',
+    'combination_with_replacement_index',
     'consecutive_groups',
     'constrained_batches',
     'consumer',
@@ -93,10 +96,13 @@ __all__ = [
     'nth_or_last',
     'nth_permutation',
     'nth_product',
+    'nth_combination_with_replacement',
     'numeric_range',
     'one',
     'only',
+    'outer_product',
     'padded',
+    'partial_product',
     'partitions',
     'peekable',
     'permutation_index',
@@ -125,6 +131,7 @@ __all__ = [
     'strictly_n',
     'substrings',
     'substrings_indexes',
+    'takewhile_inclusive',
     'time_limited',
     'unique_in_window',
     'unique_to_each',
@@ -472,7 +479,10 @@ def iterate(func, start):
     """
     while True:
         yield start
-        start = func(start)
+        try:
+            start = func(start)
+        except StopIteration:
+            break
 
 
 def with_iter(context_manager):
@@ -2069,7 +2079,6 @@ class numeric_range(abc.Sequence, abc.Hashable):
         if self._step == self._zero:
             raise ValueError('numeric_range() arg 3 must not be zero')
         self._growing = self._step > self._zero
-        self._init_len()
 
     def __bool__(self):
         if self._growing:
@@ -2145,7 +2154,8 @@ class numeric_range(abc.Sequence, abc.Hashable):
     def __len__(self):
         return self._len
 
-    def _init_len(self):
+    @cached_property
+    def _len(self):
         if self._growing:
             start = self._start
             stop = self._stop
@@ -2156,10 +2166,10 @@ class numeric_range(abc.Sequence, abc.Hashable):
             step = -self._step
         distance = stop - start
         if distance <= self._zero:
-            self._len = 0
+            return 0
         else:  # distance > 0 and step > 0: regular euclidean division
             q, r = divmod(distance, step)
-            self._len = int(q) + int(r != self._zero)
+            return int(q) + int(r != self._zero)
 
     def __reduce__(self):
         return numeric_range, (self._start, self._stop, self._step)
@@ -2699,6 +2709,9 @@ class seekable:
         >>> it.seek(10)
         >>> next(it)
         '10'
+        >>> it.relative_seek(-2)  # Seeking relative to the current position
+        >>> next(it)
+        '9'
         >>> it.seek(20)  # Seeking past the end of the source isn't a problem
         >>> list(it)
         []
@@ -2812,6 +2825,10 @@ class seekable:
         if remainder > 0:
             consume(self, remainder)
 
+    def relative_seek(self, count):
+        index = len(self._cache)
+        self.seek(max(index + count, 0))
+
 
 class run_length:
     """
@@ -3859,6 +3876,54 @@ def nth_permutation(iterable, r, index):
     return tuple(map(pool.pop, result))
 
 
+def nth_combination_with_replacement(iterable, r, index):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r))[index]``.
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`nth_combination_with_replacement`
+    computes the subsequence at sort position *index* directly, without
+    computing the previous subsequences with replacement.
+
+        >>> nth_combination_with_replacement(range(5), 3, 5)
+        (0, 1, 1)
+
+    ``ValueError`` will be raised If *r* is negative or greater than the length
+    of *iterable*.
+    ``IndexError`` will be raised if the given *index* is invalid.
+    """
+    pool = tuple(iterable)
+    n = len(pool)
+    if (r < 0) or (r > n):
+        raise ValueError
+    c = factorial(n + r - 1) // (factorial(r) * factorial(n - 1))
+
+    if index < 0:
+        index += c
+
+    if (index < 0) or (index >= c):
+        raise IndexError
+
+    result = []
+    i = 0
+    while r:
+        r -= 1
+        while n >= 0:
+            num_combs = factorial(n + r - 1) // (
+                factorial(r) * factorial(n - 1)
+            )
+            if index < num_combs:
+                break
+            n -= 1
+            i += 1
+            index -= num_combs
+        result.append(pool[i])
+
+    return tuple(result)
+
+
 def value_chain(*args):
     """Yield all arguments passed to the function in the same order in which
     they were passed. If an argument itself is iterable then iterate over its
@@ -3955,6 +4020,61 @@ def combination_index(element, iterable):
     return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
 
 
+def combination_with_replacement_index(element, iterable):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r)).index(element)``
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`combination_with_replacement_index`
+    computes the index of the first *element*, without computing the previous
+    combinations with replacement.
+
+        >>> combination_with_replacement_index('adf', 'abcdefg')
+        20
+
+    ``ValueError`` will be raised if the given *element* isn't one of the
+    combinations with replacement of *iterable*.
+    """
+    element = tuple(element)
+    l = len(element)
+    element = enumerate(element)
+
+    k, y = next(element, (None, None))
+    if k is None:
+        return 0
+
+    indexes = []
+    pool = tuple(iterable)
+    for n, x in enumerate(pool):
+        while x == y:
+            indexes.append(n)
+            tmp, y = next(element, (None, None))
+            if tmp is None:
+                break
+            else:
+                k = tmp
+        if y is None:
+            break
+    else:
+        raise ValueError(
+            'element is not a combination with replacment of iterable'
+        )
+
+    n = len(pool)
+    occupations = [0] * n
+    for p in indexes:
+        occupations[p] += 1
+
+    index = 0
+    for k in range(1, n):
+        j = l + n - 1 - k - sum(occupations[:k])
+        i = n - k
+        if i <= j:
+            index += factorial(j) // (factorial(i) * factorial(j - i))
+
+    return index
+
+
 def permutation_index(element, iterable):
     """Equivalent to ``list(permutations(iterable, r)).index(element)```
@@ -4057,26 +4177,20 @@ def _chunked_even_finite(iterable, N, n):
     num_full = N - partial_size * num_lists
     num_partial = num_lists - num_full
 
-    buffer = []
-    iterator = iter(iterable)
-
     # Yield num_full lists of full_size
-    for x in iterator:
-        buffer.append(x)
-        if len(buffer) == full_size:
-            yield buffer
-            buffer = []
-            num_full -= 1
-            if num_full <= 0:
-                break
+    partial_start_idx = num_full * full_size
+    if full_size > 0:
+        for i in range(0, partial_start_idx, full_size):
+            yield list(islice(iterable, i, i + full_size))
 
     # Yield num_partial lists of partial_size
-    for x in iterator:
-        buffer.append(x)
-        if len(buffer) == partial_size:
-            yield buffer
-            buffer = []
-            num_partial -= 1
+    if partial_size > 0:
+        for i in range(
+            partial_start_idx,
+            partial_start_idx + (num_partial * partial_size),
+            partial_size,
+        ):
+            yield list(islice(iterable, i, i + partial_size))
 
 
 def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
@@ -4115,30 +4229,23 @@ def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
     if not size:
         return
 
+    new_item = [None] * size
     iterables, iterable_positions = [], []
-    scalars, scalar_positions = [], []
     for i, obj in enumerate(objects):
         if is_scalar(obj):
-            scalars.append(obj)
-            scalar_positions.append(i)
+            new_item[i] = obj
         else:
             iterables.append(iter(obj))
             iterable_positions.append(i)
 
-    if len(scalars) == size:
+    if not iterables:
         yield tuple(objects)
         return
 
     zipper = _zip_equal if strict else zip
     for item in zipper(*iterables):
-        new_item = [None] * size
-
-        for i, elem in zip(iterable_positions, item):
-            new_item[i] = elem
-
-        for i, elem in zip(scalar_positions, scalars):
-            new_item[i] = elem
-
+        for i, new_item[i] in zip(iterable_positions, item):
+            pass
+
         yield tuple(new_item)
@@ -4163,22 +4270,23 @@ def unique_in_window(iterable, n, key=None):
         raise ValueError('n must be greater than 0')
 
     window = deque(maxlen=n)
-    uniques = set()
+    counts = defaultdict(int)
     use_key = key is not None
 
     for item in iterable:
+        if len(window) == n:
+            to_discard = window[0]
+            if counts[to_discard] == 1:
+                del counts[to_discard]
+            else:
+                counts[to_discard] -= 1
+
         k = key(item) if use_key else item
-        if k in uniques:
-            continue
-
-        if len(uniques) == n:
-            uniques.discard(window[0])
-
-        uniques.add(k)
+        if k not in counts:
+            yield item
+        counts[k] += 1
         window.append(k)
-
-        yield item
 
 
 def duplicates_everseen(iterable, key=None):
     """Yield duplicate elements after their first appearance.
@@ -4221,12 +4329,7 @@ def duplicates_justseen(iterable, key=None):
     This function is analagous to :func:`unique_justseen`.
 
     """
-    return flatten(
-        map(
-            lambda group_tuple: islice_extended(group_tuple[1])[1:],
-            groupby(iterable, key),
-        )
-    )
+    return flatten(g for _, g in groupby(iterable, key) for _ in g)
 
 
 def minmax(iterable_or_value, *others, key=None, default=_marker):
@@ -4390,3 +4493,77 @@ def gray_product(*iterables):
         o[j] = -o[j]
         f[j] = f[j + 1]
         f[j + 1] = j + 1
+
+
+def partial_product(*iterables):
+    """Yields tuples containing one item from each iterator, with subsequent
+    tuples changing a single item at a time by advancing each iterator until it
+    is exhausted. This sequence guarantees every value in each iterable is
+    output at least once without generating all possible combinations.
+
+    This may be useful, for example, when testing an expensive function.
+
+        >>> list(partial_product('AB', 'C', 'DEF'))
+        [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')]
+    """
+    iterators = list(map(iter, iterables))
+
+    try:
+        prod = [next(it) for it in iterators]
+    except StopIteration:
+        return
+    yield tuple(prod)
+
+    for i, it in enumerate(iterators):
+        for prod[i] in it:
+            yield tuple(prod)
+
+
+def takewhile_inclusive(predicate, iterable):
+    """A variant of :func:`takewhile` that yields one additional element.
+
+        >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1]))
+        [1, 4, 6]
+
+    :func:`takewhile` would return ``[1, 4]``.
+    """
+    for x in iterable:
+        if predicate(x):
+            yield x
+        else:
+            yield x
+            break
+
+
+def outer_product(func, xs, ys, *args, **kwargs):
+    """A generalized outer product that applies a binary function to all
+    pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)``
+    columns.
+    Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``.
+
+    Multiplication table:
+
+    >>> list(outer_product(mul, range(1, 4), range(1, 6)))
+    [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)]
+
+    Cross tabulation:
+
+    >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B']
+    >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z']
+    >>> rows = list(zip(xs, ys))
+    >>> count_rows = lambda x, y: rows.count((x, y))
+    >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys))))
+    [(2, 3, 0), (1, 0, 4)]
+
+    Usage with ``*args`` and ``**kwargs``:
+
+    >>> animals = ['cat', 'wolf', 'mouse']
+    >>> list(outer_product(min, animals, animals, key=len))
+    [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')]
+    """
+    ys = tuple(ys)
+    return batched(
+        starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
+        n=len(ys),
+    )

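Two of the additions above are easy to demonstrate with the public `more_itertools` API; the expected values follow directly from the docstrings and method bodies in the hunks (this is an illustrative sketch, not part of the commit):

```python
from more_itertools import seekable, takewhile_inclusive

# takewhile_inclusive also yields the first element that fails the predicate
print(list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1])))  # [1, 4, 6]

# relative_seek moves the cursor relative to the items cached so far
it = seekable(str(n) for n in range(20))
next(it), next(it), next(it)   # consume '0', '1', '2'
it.relative_seek(-2)           # step back two positions
print(next(it))                # '1'
```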
View file

@@ -440,6 +440,7 @@ class seekable(Generic[_T], Iterator[_T]):
     def peek(self, default: _U) -> _T | _U: ...
     def elements(self) -> SequenceView[_T]: ...
     def seek(self, index: int) -> None: ...
+    def relative_seek(self, count: int) -> None: ...
 
 class run_length:
     @staticmethod
@@ -578,6 +579,9 @@ def all_unique(
     iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> bool: ...
 def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
+def nth_combination_with_replacement(
+    iterable: Iterable[_T], r: int, index: int
+) -> tuple[_T, ...]: ...
 def nth_permutation(
     iterable: Iterable[_T], r: int, index: int
 ) -> tuple[_T, ...]: ...
@@ -586,6 +590,9 @@ def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
 def combination_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
+def combination_with_replacement_index(
+    element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
 def permutation_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
@@ -664,3 +671,14 @@ def constrained_batches(
     strict: bool = ...,
 ) -> Iterator[tuple[_T]]: ...
 def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def takewhile_inclusive(
+    predicate: Callable[[_T], bool], iterable: Iterable[_T]
+) -> Iterator[_T]: ...
+def outer_product(
+    func: Callable[[_T, _U], _V],
+    xs: Iterable[_T],
+    ys: Iterable[_U],
+    *args: Any,
+    **kwargs: Any,
+) -> Iterator[tuple[_V, ...]]: ...

View file

@@ -9,11 +9,10 @@ Some backward-compatible usability improvements have been made.
 """
 import math
 import operator
-import warnings
 
 from collections import deque
 from collections.abc import Sized
-from functools import reduce
+from functools import partial, reduce
 from itertools import (
     chain,
     combinations,
@@ -29,7 +28,6 @@ from itertools import (
     zip_longest,
 )
 from random import randrange, sample, choice
-from sys import hexversion
 
 __all__ = [
     'all_equal',
@@ -52,7 +50,9 @@ __all__ = [
     'pad_none',
     'pairwise',
     'partition',
+    'polynomial_eval',
     'polynomial_from_roots',
+    'polynomial_derivative',
     'powerset',
     'prepend',
     'quantify',
@@ -65,6 +65,7 @@ __all__ = [
     'sieve',
     'sliding_window',
     'subslices',
+    'sum_of_squares',
     'tabulate',
     'tail',
     'take',
@@ -77,6 +78,18 @@ __all__ = [
 
 _marker = object()
 
+
+# zip with strict is available for Python 3.10+
+try:
+    zip(strict=True)
+except TypeError:
+    _zip_strict = zip
+else:
+    _zip_strict = partial(zip, strict=True)
+
+# math.sumprod is available for Python 3.12+
+_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y))
+
 
 def take(n, iterable):
     """Return first *n* items of the iterable as a list.
@@ -293,7 +306,7 @@ def _pairwise(iterable):
     """
     a, b = tee(iterable)
     next(b, None)
-    yield from zip(a, b)
+    return zip(a, b)
 
 
 try:
@@ -303,7 +316,7 @@ except ImportError:
 else:
 
     def pairwise(iterable):
-        yield from itertools_pairwise(iterable)
+        return itertools_pairwise(iterable)
 
     pairwise.__doc__ = _pairwise.__doc__
 
@@ -334,13 +347,9 @@ def _zip_equal(*iterables):
         for i, it in enumerate(iterables[1:], 1):
             size = len(it)
             if size != first_size:
-                break
-        else:
-            # If we didn't break out, we can use the built-in zip.
-            return zip(*iterables)
-
-        # If we did break out, there was a mismatch.
-        raise UnequalIterablesError(details=(first_size, i, size))
+                raise UnequalIterablesError(details=(first_size, i, size))
+        # All sizes are equal, we can use the built-in zip.
+        return zip(*iterables)
     # If any one of the iterables didn't have a length, start reading
     # them until one runs out.
     except TypeError:
@@ -433,12 +442,9 @@ def partition(pred, iterable):
     if pred is None:
         pred = bool
 
-    evaluations = ((pred(x), x) for x in iterable)
-    t1, t2 = tee(evaluations)
-    return (
-        (x for (cond, x) in t1 if not cond),
-        (x for (cond, x) in t2 if cond),
-    )
+    t1, t2, p = tee(iterable, 3)
+    p1, p2 = tee(map(pred, p))
+    return (compress(t1, map(operator.not_, p1)), compress(t2, p2))
 
 
 def powerset(iterable):
@@ -712,12 +718,14 @@ def convolve(signal, kernel):
     is immediately consumed and stored.
 
     """
+    # This implementation intentionally doesn't match the one in the itertools
+    # documentation.
     kernel = tuple(kernel)[::-1]
     n = len(kernel)
     window = deque([0], maxlen=n) * n
     for x in chain(signal, repeat(0, n - 1)):
         window.append(x)
-        yield sum(map(operator.mul, kernel, window))
+        yield _sumprod(kernel, window)
 
 
 def before_and_after(predicate, it):
@@ -778,9 +786,7 @@ def sliding_window(iterable, n):
     For a variant with more features, see :func:`windowed`.
     """
     it = iter(iterable)
-    window = deque(islice(it, n), maxlen=n)
-    if len(window) == n:
-        yield tuple(window)
+    window = deque(islice(it, n - 1), maxlen=n)
     for x in it:
         window.append(x)
         yield tuple(window)
@@ -807,12 +813,8 @@ def polynomial_from_roots(roots):
     >>> polynomial_from_roots(roots)  # x^3 - 4 * x^2 - 17 * x + 60
     [1, -4, -17, 60]
     """
-    # Use math.prod for Python 3.8+,
-    prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1))
-    roots = list(map(operator.neg, roots))
-    return [
-        sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1)
-    ]
+    factors = zip(repeat(1), map(operator.neg, roots))
+    return list(reduce(convolve, factors, [1]))
 
 
 def iter_index(iterable, value, start=0):
@@ -830,9 +832,13 @@ def iter_index(iterable, value, start=0):
     except AttributeError:
         # Slow path for general iterables
         it = islice(iterable, start, None)
-        for i, element in enumerate(it, start):
-            if element is value or element == value:
+        i = start - 1
+        try:
+            while True:
+                i = i + operator.indexOf(it, value) + 1
                 yield i
+        except ValueError:
+            pass
     else:
         # Fast path for sequences
         i = start - 1
@@ -850,43 +856,45 @@ def sieve(n):
     >>> list(sieve(30))
     [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
     """
-    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
     data = bytearray((0, 1)) * (n // 2)
     data[:3] = 0, 0, 0
-    limit = isqrt(n) + 1
+    limit = math.isqrt(n) + 1
     for p in compress(range(limit), data):
         data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
     data[2] = 1
     return iter_index(data, 1) if n > 2 else iter([])
 
 
-def batched(iterable, n):
+def _batched(iterable, n):
     """Batch data into lists of length *n*. The last batch may be shorter.
 
     >>> list(batched('ABCDEFG', 3))
-    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
+    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
 
-    This recipe is from the ``itertools`` docs. This library also provides
-    :func:`chunked`, which has a different implementation.
+    On Python 3.12 and above, this is an alias for :func:`itertools.batched`.
     """
-    if hexversion >= 0x30C00A0:  # Python 3.12.0a0
-        warnings.warn(
-            (
-                'batched will be removed in a future version of '
-                'more-itertools. Use the standard library '
-                'itertools.batched function instead'
-            ),
-            DeprecationWarning,
-        )
-
+    if n < 1:
+        raise ValueError('n must be at least one')
     it = iter(iterable)
     while True:
-        batch = list(islice(it, n))
+        batch = tuple(islice(it, n))
         if not batch:
             break
         yield batch
 
 
+try:
+    from itertools import batched as itertools_batched
+except ImportError:
+    batched = _batched
+else:
+
+    def batched(iterable, n):
+        return itertools_batched(iterable, n)
+
+    batched.__doc__ = _batched.__doc__
+
+
 def transpose(it):
     """Swap the rows and columns of the input.
@@ -894,21 +902,21 @@ def transpose(it):
     [(1, 11), (2, 22), (3, 33)]
 
     The caller should ensure that the dimensions of the input are compatible.
+    If the input is empty, no output will be produced.
     """
-    # TODO: when 3.9 goes end-of-life, add stric=True to this.
-    return zip(*it)
+    return _zip_strict(*it)
 
 
 def matmul(m1, m2):
     """Multiply two matrices.
 
     >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
-    [[49, 80], [41, 60]]
+    [(49, 80), (41, 60)]
 
     The caller should ensure that the dimensions of the input matrices are
     compatible with each other.
     """
     n = len(m2[0])
-    return batched(starmap(dotproduct, product(m1, transpose(m2))), n)
+    return batched(starmap(_sumprod, product(m1, transpose(m2))), n)
 
 
 def factor(n):
@@ -916,15 +924,54 @@ def factor(n):
     >>> list(factor(360))
     [2, 2, 2, 3, 3, 5]
     """
-    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
-    for prime in sieve(isqrt(n) + 1):
+    for prime in sieve(math.isqrt(n) + 1):
         while True:
-            quotient, remainder = divmod(n, prime)
-            if remainder:
+            if n % prime:
                 break
             yield prime
-            n = quotient
+            n //= prime
             if n == 1:
                 return
-    if n >= 2:
+    if n > 1:
         yield n
+
+
+def polynomial_eval(coefficients, x):
+    """Evaluate a polynomial at a specific value.
+
+    Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5:
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> x = 2.5
+    >>> polynomial_eval(coefficients, x)
+    8.125
+    """
+    n = len(coefficients)
+    if n == 0:
+        return x * 0  # coerce zero to the type of x
+    powers = map(pow, repeat(x), reversed(range(n)))
+    return _sumprod(coefficients, powers)
+
+
+def sum_of_squares(it):
+    """Return the sum of the squares of the input values.
+
+    >>> sum_of_squares([10, 20, 30])
+    1400
+    """
+    return _sumprod(*tee(it))
+
+
+def polynomial_derivative(coefficients):
+    """Compute the first derivative of a polynomial.
+
+    Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> derivative_coefficients = polynomial_derivative(coefficients)
+    >>> derivative_coefficients
+    [3, -8, -17]
+    """
+    n = len(coefficients)
+    powers = reversed(range(1, n))
+    return list(map(operator.mul, coefficients, powers))

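The polynomial recipes added above compose with one another; a small sketch using only functions defined in this hunk (the expected outputs follow from the docstrings):

```python
from more_itertools import (
    polynomial_derivative,
    polynomial_eval,
    polynomial_from_roots,
    sum_of_squares,
)

coeffs = polynomial_from_roots([5, -4, 3])  # (x-5)(x+4)(x-3) = x^3 - 4x^2 - 17x + 60
print(coeffs)                               # [1, -4, -17, 60]
print(polynomial_eval(coeffs, 2.5))         # 8.125
print(polynomial_derivative(coeffs))        # [3, -8, -17]
print(sum_of_squares([10, 20, 30]))         # 1400
```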
View file

@@ -21,7 +21,7 @@ def tabulate(
     function: Callable[[int], _T], start: int = ...
 ) -> Iterator[_T]: ...
 def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
-def consume(iterator: Iterable[object], n: int | None = ...) -> None: ...
+def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
 @overload
 def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
 @overload
@@ -101,7 +101,7 @@ def sliding_window(
     iterable: Iterable[_T], n: int
 ) -> Iterator[tuple[_T, ...]]: ...
 def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
-def polynomial_from_roots(roots: Sequence[int]) -> list[int]: ...
+def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
 def iter_index(
     iterable: Iterable[object],
     value: Any,
@@ -111,9 +111,12 @@ def sieve(n: int) -> Iterator[int]: ...
 def batched(
     iterable: Iterable[_T],
     n: int,
-) -> Iterator[list[_T]]: ...
+) -> Iterator[tuple[_T]]: ...
 def transpose(
     it: Iterable[Iterable[_T]],
-) -> tuple[Iterator[_T], ...]: ...
-def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[list[_T]]: ...
+) -> Iterator[tuple[_T, ...]]: ...
+def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
 def factor(n: int) -> Iterator[int]: ...
+def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
+def sum_of_squares(it: Iterable[_T]) -> _T: ...
+def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...

View file

@@ -383,9 +383,9 @@ def parse_timedelta(str):
     Note that months and years strict intervals, not aligned
     to a calendar:
 
-    >>> now = datetime.datetime.now()
-    >>> later = now + parse_timedelta('1 year')
-    >>> diff = later.replace(year=now.year) - now
+    >>> date = datetime.datetime.fromisoformat('2000-01-01')
+    >>> later = date + parse_timedelta('1 year')
+    >>> diff = later.replace(year=date.year) - date
     >>> diff.seconds
     20940

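The docstring fix above pins the doctest to a fixed date so its output is deterministic instead of depending on `datetime.datetime.now()`. For orientation, basic `parse_timedelta` usage looks roughly like this (a hedged sketch of the tempora API; only the '1 year' case actually appears in the hunk):

```python
import datetime
from tempora import parse_timedelta

# Assumed examples: simple unit strings map onto datetime.timedelta.
assert parse_timedelta('3 seconds') == datetime.timedelta(seconds=3)
assert parse_timedelta('1 day') == datetime.timedelta(days=1)
```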
View file

@@ -28,7 +28,7 @@ def now():
     A client may override this function to change the default behavior,
     such as to use local time or timezone-naïve times.
     """
-    return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
+    return datetime.datetime.now(pytz.utc)
 
 
 def from_timestamp(ts):
@@ -38,7 +38,7 @@ def from_timestamp(ts):
     A client may override this function to change the default behavior,
     such as to use local time or timezone-naïve times.
     """
-    return datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
+    return datetime.datetime.fromtimestamp(ts, pytz.utc)
 
 
 class DelayedCommand(datetime.datetime):

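Both hunks above replace the deprecated naive-UTC constructors with their timezone-aware equivalents. The pattern in isolation (standard library plus `pytz`, which the module already imports):

```python
import datetime
import pytz

# Aware datetimes built directly in UTC
aware_now = datetime.datetime.now(pytz.utc)
aware_epoch = datetime.datetime.fromtimestamp(0, pytz.utc)

# utcnow()/utcfromtimestamp() return naive datetimes and are deprecated
# as of Python 3.12, which is why they are swapped out here.
print(aware_now.tzinfo, aware_epoch.isoformat())
```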
View file

@@ -48,20 +48,21 @@ class Stopwatch:
     def reset(self):
         self.elapsed = datetime.timedelta(0)
         with contextlib.suppress(AttributeError):
-            del self.start_time
+            del self._start
+
+    def _diff(self):
+        return datetime.timedelta(seconds=time.monotonic() - self._start)
 
     def start(self):
-        self.start_time = datetime.datetime.utcnow()
+        self._start = time.monotonic()
 
     def stop(self):
-        stop_time = datetime.datetime.utcnow()
-        self.elapsed += stop_time - self.start_time
-        del self.start_time
+        self.elapsed += self._diff()
+        del self._start
         return self.elapsed
 
     def split(self):
-        local_duration = datetime.datetime.utcnow() - self.start_time
-        return self.elapsed + local_duration
+        return self.elapsed + self._diff()
 
     # context manager support
     def __enter__(self):

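The Stopwatch rewrite above times work with `time.monotonic()` rather than wall-clock datetimes, so NTP corrections or DST shifts cannot skew the measured interval. The underlying pattern, sketched on its own:

```python
import datetime
import time

start = time.monotonic()
time.sleep(0.25)  # the work being timed
elapsed = datetime.timedelta(seconds=time.monotonic() - start)
print(elapsed.total_seconds())  # ~0.25, unaffected by system clock changes
```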
View file

@@ -41,7 +41,7 @@ requests-oauthlib==1.3.1
 rumps==0.4.0; platform_system == "Darwin"
 simplejson==3.19.1
 six==1.16.0
-tempora==5.2.1
+tempora==5.5.0
 tokenize-rt==5.0.0
 tzdata==2023.3
 tzlocal==4.2