Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-08-13 18:16:57 -07:00
Bump tempora from 5.6.0 to 5.7.0 (#2371)
* Bump tempora from 5.6.0 to 5.7.0

Bumps [tempora](https://github.com/jaraco/tempora) from 5.6.0 to 5.7.0.
- [Release notes](https://github.com/jaraco/tempora/releases)
- [Changelog](https://github.com/jaraco/tempora/blob/main/NEWS.rst)
- [Commits](https://github.com/jaraco/tempora/compare/v5.6.0...v5.7.0)

---
updated-dependencies:
- dependency-name: tempora
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update tempora==5.7.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
This commit is contained in:
parent
2e6f541ec2
commit
43cb027592
7 changed files with 453 additions and 101 deletions
@@ -14,6 +14,14 @@ def compose(*funcs):
     """
     Compose any number of unary functions into a single unary function.
 
+    Comparable to
+    `function composition <https://en.wikipedia.org/wiki/Function_composition>`_
+    in mathematics:
+
+    ``h = g ∘ f`` implies ``h(x) = g(f(x))``.
+
+    In Python, ``h = compose(g, f)``.
+
     >>> import textwrap
     >>> expected = str.strip(textwrap.dedent(compose.__doc__))
     >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
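
The hunk above only touches the docstring of the vendored ``compose`` helper (this looks like jaraco.functools' ``compose``). As a minimal standalone sketch of the composition idea the new text describes, assuming nothing beyond the standard library:

    # compose(g, f)(x) == g(f(x)): functions are applied right to left.
    import functools
    import textwrap

    def compose(*funcs):
        # Reduce the functions pairwise into a single unary callable.
        return functools.reduce(lambda g, f: lambda x: g(f(x)), funcs)

    strip_and_dedent = compose(str.strip, textwrap.dedent)
    print(strip_and_dedent("    indented\n    text\n"))  # -> 'indented\ntext'
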
@@ -3,4 +3,4 @@
 from .more import *  # noqa
 from .recipes import *  # noqa
 
-__version__ = '10.3.0'
+__version__ = '10.4.0'
@@ -3,8 +3,9 @@ import warnings
 
 from collections import Counter, defaultdict, deque, abc
 from collections.abc import Sequence
+from contextlib import suppress
 from functools import cached_property, partial, reduce, wraps
-from heapq import heapify, heapreplace, heappop
+from heapq import heapify, heapreplace
 from itertools import (
     chain,
     combinations,
@@ -21,10 +22,10 @@ from itertools import (
     zip_longest,
     product,
 )
-from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
+from math import comb, e, exp, factorial, floor, fsum, log, log1p, perm, tau
 from queue import Empty, Queue
-from random import random, randrange, uniform
-from operator import itemgetter, mul, sub, gt, lt, ge, le
+from random import random, randrange, shuffle, uniform
+from operator import itemgetter, mul, sub, gt, lt, le
 from sys import hexversion, maxsize
 from time import monotonic
 
@@ -34,7 +35,6 @@ from .recipes import (
     UnequalIterablesError,
     consume,
     flatten,
-    pairwise,
     powerset,
     take,
     unique_everseen,
@@ -473,12 +473,10 @@ def ilen(iterable):
     This consumes the iterable, so handle with care.
 
     """
-    # This approach was selected because benchmarks showed it's likely the
-    # fastest of the known implementations at the time of writing.
-    # See GitHub tracker: #236, #230.
-    counter = count()
-    deque(zip(iterable, counter), maxlen=0)
-    return next(counter)
+    # This is the "most beautiful of the fast variants" of this function.
+    # If you think you can improve on it, please ensure that your version
+    # is both 10x faster and 10x more beautiful.
+    return sum(compress(repeat(1), zip(iterable)))
 
 
 def iterate(func, start):
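
Why the new ``ilen`` body works: ``zip(iterable)`` wraps each element in a 1-tuple, which is always truthy, so ``compress(repeat(1), ...)`` emits one ``1`` per element and ``sum`` adds them up. A standalone sketch of the same trick, not the vendored module itself:

    from itertools import compress, repeat

    def ilen(iterable):
        # Count elements without building a list.
        return sum(compress(repeat(1), zip(iterable)))

    print(ilen(x for x in range(100) if x % 3 == 0))  # -> 34
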
@@ -666,9 +664,9 @@ def distinct_permutations(iterable, r=None):
     >>> sorted(distinct_permutations([1, 0, 1]))
     [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
 
-    Equivalent to ``set(permutations(iterable))``, except duplicates are not
-    generated and thrown away. For larger input sequences this is much more
-    efficient.
+    Equivalent to yielding from ``set(permutations(iterable))``, except
+    duplicates are not generated and thrown away. For larger input sequences
+    this is much more efficient.
 
     Duplicate permutations arise when there are duplicated elements in the
     input iterable. The number of items returned is
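
The reworded docstring claims equivalence with yielding from ``set(permutations(iterable))``. A quick stdlib check of that equivalence for the small input used in the doctest:

    from itertools import permutations

    data = [1, 0, 1]
    print(sorted(set(permutations(data))))
    # -> [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
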
@@ -683,6 +681,25 @@ def distinct_permutations(iterable, r=None):
     >>> sorted(distinct_permutations(range(3), r=2))
     [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
 
+    *iterable* need not be sortable, but note that using equal (``x == y``)
+    but non-identical (``id(x) != id(y)``) elements may produce surprising
+    behavior. For example, ``1`` and ``True`` are equal but non-identical:
+
+        >>> list(distinct_permutations([1, True, '3']))  # doctest: +SKIP
+        [
+            (1, True, '3'),
+            (1, '3', True),
+            ('3', 1, True)
+        ]
+        >>> list(distinct_permutations([1, 2, '3']))  # doctest: +SKIP
+        [
+            (1, 2, '3'),
+            (1, '3', 2),
+            (2, 1, '3'),
+            (2, '3', 1),
+            ('3', 1, 2),
+            ('3', 2, 1)
+        ]
+
     """
 
     # Algorithm: https://w.wiki/Qai
@@ -749,14 +766,44 @@ def distinct_permutations(iterable, r=None):
             i += 1
         head[i:], tail[:] = tail[: r - i], tail[r - i :]
 
-    items = sorted(iterable)
+    items = list(iterable)
 
+    try:
+        items.sort()
+        sortable = True
+    except TypeError:
+        sortable = False
+
+        indices_dict = defaultdict(list)
+
+        for item in items:
+            indices_dict[items.index(item)].append(item)
+
+        indices = [items.index(item) for item in items]
+        indices.sort()
+
+        equivalent_items = {k: cycle(v) for k, v in indices_dict.items()}
+
+        def permuted_items(permuted_indices):
+            return tuple(
+                next(equivalent_items[index]) for index in permuted_indices
+            )
+
     size = len(items)
     if r is None:
         r = size
 
+    # functools.partial(_partial, ... )
+    algorithm = _full if (r == size) else partial(_partial, r=r)
+
     if 0 < r <= size:
-        return _full(items) if (r == size) else _partial(items, r)
+        if sortable:
+            return algorithm(items)
+        else:
+            return (
+                permuted_items(permuted_indices)
+                for permuted_indices in algorithm(indices)
+            )
 
     return iter(() if r else ((),))
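
The new body above falls back to permuting sorted index positions when the items themselves cannot be ordered, then maps the indices back to equal items. A standalone sketch of that idea for an unsortable input; it mirrors the approach, not the vendored code, and uses ``set(permutations(...))`` in place of the library's ``_full`` helper:

    from collections import defaultdict
    from itertools import cycle, permutations

    items = [1j, 2j, 1j]  # complex numbers do not support sorting

    # Group equal items under a representative index.
    indices_dict = defaultdict(list)
    for item in items:
        indices_dict[items.index(item)].append(item)

    indices = sorted(items.index(item) for item in items)  # [0, 0, 1]
    equivalent_items = {k: cycle(v) for k, v in indices_dict.items()}

    # Distinct permutations of the index pattern map back to the items.
    result = [
        tuple(next(equivalent_items[i]) for i in perm)
        for perm in set(permutations(indices))
    ]
    print(len(result))  # -> 3 distinct permutations of [1j, 2j, 1j]
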
@@ -1743,7 +1790,9 @@ def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
     return zip(*staggered)
 
 
-def sort_together(iterables, key_list=(0,), key=None, reverse=False):
+def sort_together(
+    iterables, key_list=(0,), key=None, reverse=False, strict=False
+):
     """Return the input iterables sorted together, with *key_list* as the
     priority for sorting. All iterables are trimmed to the length of the
     shortest one.
@@ -1782,6 +1831,10 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
     >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
     [(3, 2, 1), ('a', 'b', 'c')]
 
+    If the *strict* keyword argument is ``True``, then
+    ``UnequalIterablesError`` will be raised if any of the iterables have
+    different lengths.
+
     """
     if key is None:
         # if there is no key function, the key argument to sorted is an
@@ -1804,8 +1857,9 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
             *get_key_items(zipped_items)
         )
 
+    zipper = zip_equal if strict else zip
     return list(
-        zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
+        zipper(*sorted(zipper(*iterables), key=key_argument, reverse=reverse))
     )
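
What the new *strict* flag changes: with ``strict=False`` the iterables are zipped and silently trimmed to the shortest, while with ``strict=True`` a length mismatch becomes an error. A simplified standalone sketch (no ``key_list``/``key`` support), assuming Python 3.10+ so that ``zip(..., strict=True)`` can stand in for the library's ``zip_equal``:

    def sort_together(iterables, reverse=False, strict=False):
        zipped = zip(*iterables, strict=strict)  # raises ValueError on length mismatch
        return list(zip(*sorted(zipped, reverse=reverse)))

    print(sort_together([(3, 1, 2), ('c', 'a', 'b')]))
    # -> [(1, 2, 3), ('a', 'b', 'c')]
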
@@ -2747,8 +2801,6 @@ class seekable:
        >>> it.seek(0)
        >>> next(it), next(it), next(it)
        ('0', '1', '2')
-        >>> next(it)
-        '3'
 
     You can also seek forward:
 
@@ -2756,15 +2808,29 @@ class seekable:
        >>> it.seek(10)
        >>> next(it)
        '10'
-        >>> it.relative_seek(-2)  # Seeking relative to the current position
-        >>> next(it)
-        '9'
        >>> it.seek(20)  # Seeking past the end of the source isn't a problem
        >>> list(it)
        []
        >>> it.seek(0)  # Resetting works even after hitting the end
+        >>> next(it)
+        '0'
+
+    Call :meth:`relative_seek` to seek relative to the source iterator's
+    current position.
+
+        >>> it = seekable((str(n) for n in range(20)))
        >>> next(it), next(it), next(it)
        ('0', '1', '2')
+        >>> it.relative_seek(2)
+        >>> next(it)
+        '5'
+        >>> it.relative_seek(-3)  # Source is at '6', we move back to '3'
+        >>> next(it)
+        '3'
+        >>> it.relative_seek(-3)  # Source is at '4', we move back to '1'
+        >>> next(it)
+        '1'
 
     Call :meth:`peek` to look ahead one item without advancing the iterator:
 
@@ -2873,8 +2939,10 @@ class seekable:
             consume(self, remainder)
 
     def relative_seek(self, count):
-        index = len(self._cache)
-        self.seek(max(index + count, 0))
+        if self._index is None:
+            self._index = len(self._cache)
+
+        self.seek(max(self._index + count, 0))
 
 
 class run_length:
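
The ``relative_seek`` fix makes the offset relative to the object's own ``_index`` when one is set, instead of always measuring from the end of the cache. A toy cache-backed iterator illustrating the corrected bookkeeping; it supports backward seeks only, whereas the real ``seekable`` also consumes from the source on forward seeks:

    class ToySeekable:
        def __init__(self, iterable):
            self._source = iter(iterable)
            self._cache = []
            self._index = None

        def __iter__(self):
            return self

        def __next__(self):
            # Serve from the cache while an index is set, else pull from the source.
            if self._index is not None:
                if self._index < len(self._cache):
                    item = self._cache[self._index]
                    self._index += 1
                    return item
                self._index = None
            item = next(self._source)
            self._cache.append(item)
            return item

        def seek(self, index):
            self._index = index

        def relative_seek(self, count):
            if self._index is None:          # the fixed bookkeeping
                self._index = len(self._cache)
            self.seek(max(self._index + count, 0))

    it = ToySeekable(str(n) for n in range(20))
    print(next(it), next(it), next(it))  # -> 0 1 2
    it.relative_seek(-3)
    print(next(it))                      # -> 0
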
@@ -2903,7 +2971,7 @@ class run_length:
 
     @staticmethod
     def decode(iterable):
-        return chain.from_iterable(repeat(k, n) for k, n in iterable)
+        return chain.from_iterable(starmap(repeat, iterable))
 
 
 def exactly_n(iterable, n, predicate=bool):
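
The ``run_length.decode`` change is behavior-preserving: ``starmap(repeat, iterable)`` calls ``repeat(value, count)`` for each pair, exactly like the old generator expression. A standalone check:

    from itertools import chain, repeat, starmap

    encoded = [('a', 3), ('b', 1)]
    old = list(chain.from_iterable(repeat(k, n) for k, n in encoded))
    new = list(chain.from_iterable(starmap(repeat, encoded)))
    print(old == new, new)  # -> True ['a', 'a', 'a', 'b']
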
@@ -2924,14 +2992,34 @@ def exactly_n(iterable, n, predicate=bool):
     return len(take(n + 1, filter(predicate, iterable))) == n
 
 
-def circular_shifts(iterable):
-    """Return a list of circular shifts of *iterable*.
+def circular_shifts(iterable, steps=1):
+    """Yield the circular shifts of *iterable*.
 
-    >>> circular_shifts(range(4))
+    >>> list(circular_shifts(range(4)))
     [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
 
+    Set *steps* to the number of places to rotate to the left
+    (or to the right if negative). Defaults to 1.
+
+    >>> list(circular_shifts(range(4), 2))
+    [(0, 1, 2, 3), (2, 3, 0, 1)]
+
+    >>> list(circular_shifts(range(4), -1))
+    [(0, 1, 2, 3), (3, 0, 1, 2), (2, 3, 0, 1), (1, 2, 3, 0)]
+
     """
-    lst = list(iterable)
-    return take(len(lst), windowed(cycle(lst), len(lst)))
+    buffer = deque(iterable)
+    if steps == 0:
+        raise ValueError('Steps should be a non-zero integer')
+
+    buffer.rotate(steps)
+    steps = -steps
+    n = len(buffer)
+    n //= math.gcd(n, steps)
+
+    for __ in repeat(None, n):
+        buffer.rotate(steps)
+        yield tuple(buffer)
 
 
 def make_decorator(wrapping_func, result_index=0):
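
Sketch of the new ``circular_shifts`` logic in isolation: rotate a deque by ``-steps`` per iteration, and stop after ``len(buffer) // gcd(len(buffer), steps)`` rotations, which is when the cycle starts repeating. A runnable standalone version of the same approach:

    import math
    from collections import deque
    from itertools import repeat

    def circular_shifts(iterable, steps=1):
        buffer = deque(iterable)
        if steps == 0:
            raise ValueError('Steps should be a non-zero integer')

        buffer.rotate(steps)   # pre-rotate so the first yield is the original order
        steps = -steps
        n = len(buffer) // math.gcd(len(buffer), steps)

        for _ in repeat(None, n):
            buffer.rotate(steps)
            yield tuple(buffer)

    print(list(circular_shifts(range(4), 2)))   # -> [(0, 1, 2, 3), (2, 3, 0, 1)]
    print(list(circular_shifts(range(4), -1)))  # -> [(0, 1, 2, 3), (3, 0, 1, 2), (2, 3, 0, 1), (1, 2, 3, 0)]
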
@@ -3191,7 +3279,7 @@ def partitions(iterable):
         yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]
 
 
-def set_partitions(iterable, k=None):
+def set_partitions(iterable, k=None, min_size=None, max_size=None):
     """
     Yield the set partitions of *iterable* into *k* parts. Set partitions are
     not order-preserving.
@@ -3215,6 +3303,20 @@ def set_partitions(iterable, k=None):
     ['b', 'ac']
     ['a', 'b', 'c']
 
+    if *min_size* and/or *max_size* are given, the minimum and/or maximum size
+    per block in partition is set.
+
+    >>> iterable = 'abc'
+    >>> for part in set_partitions(iterable, min_size=2):
+    ...     print([''.join(p) for p in part])
+    ['abc']
+    >>> for part in set_partitions(iterable, max_size=2):
+    ...     print([''.join(p) for p in part])
+    ['a', 'bc']
+    ['ab', 'c']
+    ['b', 'ac']
+    ['a', 'b', 'c']
+
     """
     L = list(iterable)
     n = len(L)
@@ -3226,6 +3328,11 @@ def set_partitions(iterable, k=None):
     elif k > n:
         return
 
+    min_size = min_size if min_size is not None else 0
+    max_size = max_size if max_size is not None else n
+    if min_size > max_size:
+        return
+
     def set_partitions_helper(L, k):
         n = len(L)
         if k == 1:
@@ -3242,9 +3349,15 @@ def set_partitions(iterable, k=None):
 
     if k is None:
         for k in range(1, n + 1):
-            yield from set_partitions_helper(L, k)
+            yield from filter(
+                lambda z: all(min_size <= len(bk) <= max_size for bk in z),
+                set_partitions_helper(L, k),
+            )
     else:
-        yield from set_partitions_helper(L, k)
+        yield from filter(
+            lambda z: all(min_size <= len(bk) <= max_size for bk in z),
+            set_partitions_helper(L, k),
+        )
 
 
 class time_limited:
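
The *min_size*/*max_size* support is a filter: any partition containing a block outside the allowed size range is dropped. A standalone check against the doctest added above, using a small stdlib-only set-partition generator (not the library's helper):

    def set_partitions_helper(L):
        # Yield every way to partition the list L into non-empty blocks.
        if len(L) == 1:
            yield [L]
            return
        first, rest = L[0], L[1:]
        for smaller in set_partitions_helper(rest):
            for i, block in enumerate(smaller):
                yield smaller[:i] + [[first] + block] + smaller[i + 1:]
            yield [[first]] + smaller

    min_size, max_size = 2, 3
    kept = [
        p for p in set_partitions_helper(list('abc'))
        if all(min_size <= len(block) <= max_size for block in p)
    ]
    print([[''.join(b) for b in p] for p in kept])  # -> [['abc']]
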
@@ -3535,32 +3648,27 @@ def map_if(iterable, pred, func, func_else=lambda x: x):
         yield func(item) if pred(item) else func_else(item)
 
 
-def _sample_unweighted(iterable, k):
-    # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
+def _sample_unweighted(iterator, k, strict):
+    # Algorithm L in the 1994 paper by Kim-Hung Li:
     # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
 
-    # Fill up the reservoir (collection of samples) with the first `k` samples
-    reservoir = take(k, iterable)
-
-    # Generate random number that's the largest in a sample of k U(0,1) numbers
-    # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
-    W = exp(log(random()) / k)
-
-    # The number of elements to skip before changing the reservoir is a random
-    # number with a geometric distribution. Sample it using random() and logs.
-    next_index = k + floor(log(random()) / log(1 - W))
-
-    for index, element in enumerate(iterable, k):
-        if index == next_index:
-            reservoir[randrange(k)] = element
-            # The new W is the largest in a sample of k U(0, `old_W`) numbers
-            W *= exp(log(random()) / k)
-            next_index += floor(log(random()) / log(1 - W)) + 1
-
+    reservoir = list(islice(iterator, k))
+    if strict and len(reservoir) < k:
+        raise ValueError('Sample larger than population')
+    W = 1.0
+
+    with suppress(StopIteration):
+        while True:
+            W *= exp(log(random()) / k)
+            skip = floor(log(random()) / log1p(-W))
+            element = next(islice(iterator, skip, None))
+            reservoir[randrange(k)] = element
+
+    shuffle(reservoir)
     return reservoir
 
 
-def _sample_weighted(iterable, k, weights):
+def _sample_weighted(iterator, k, weights, strict):
     # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
     # "Weighted random sampling with a reservoir".
 
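
The rewritten unweighted sampler follows Li's Algorithm L: keep the first *k* items, then repeatedly skip a geometrically distributed number of items before replacing a random reservoir slot. A standalone sketch of the same algorithm, with the ``strict`` handling omitted:

    from itertools import islice
    from math import exp, floor, log, log1p
    from random import random, randrange, shuffle

    def sample_unweighted(iterable, k):
        iterator = iter(iterable)
        reservoir = list(islice(iterator, k))
        W = 1.0
        try:
            while True:
                W *= exp(log(random()) / k)                   # shrink the acceptance weight
                skip = floor(log(random()) / log1p(-W))       # geometric skip length
                element = next(islice(iterator, skip, None))  # raises StopIteration at the end
                reservoir[randrange(k)] = element
        except StopIteration:
            pass
        shuffle(reservoir)
        return reservoir

    print(sample_unweighted(range(10_000), 5))  # five uniformly chosen values, e.g. [4181, 907, 7762, 2104, 9866]
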
@@ -3569,7 +3677,10 @@ def _sample_weighted(iterable, k, weights):
 
     # Fill up the reservoir (collection of samples) with the first `k`
     # weight-keys and elements, then heapify the list.
-    reservoir = take(k, zip(weight_keys, iterable))
+    reservoir = take(k, zip(weight_keys, iterator))
+    if strict and len(reservoir) < k:
+        raise ValueError('Sample larger than population')
+
     heapify(reservoir)
 
     # The number of jumps before changing the reservoir is a random variable
@@ -3577,7 +3688,7 @@ def _sample_weighted(iterable, k, weights):
     smallest_weight_key, _ = reservoir[0]
     weights_to_skip = log(random()) / smallest_weight_key
 
-    for weight, element in zip(weights, iterable):
+    for weight, element in zip(weights, iterator):
         if weight >= weights_to_skip:
             # The notation here is consistent with the paper, but we store
             # the weight-keys in log-space for better numerical stability.
@@ -3591,44 +3702,103 @@ def _sample_weighted(iterable, k, weights):
         else:
             weights_to_skip -= weight
 
-    # Equivalent to [element for weight_key, element in sorted(reservoir)]
-    return [heappop(reservoir)[1] for _ in range(k)]
+    ret = [element for weight_key, element in reservoir]
+    shuffle(ret)
+    return ret
 
 
-def sample(iterable, k, weights=None):
+def _sample_counted(population, k, counts, strict):
+    element = None
+    remaining = 0
+
+    def feed(i):
+        # Advance *i* steps ahead and consume an element
+        nonlocal element, remaining
+
+        while i + 1 > remaining:
+            i = i - remaining
+            element = next(population)
+            remaining = next(counts)
+        remaining -= i + 1
+        return element
+
+    with suppress(StopIteration):
+        reservoir = []
+        for _ in range(k):
+            reservoir.append(feed(0))
+        if strict and len(reservoir) < k:
+            raise ValueError('Sample larger than population')
+
+        W = 1.0
+        while True:
+            W *= exp(log(random()) / k)
+            skip = floor(log(random()) / log1p(-W))
+            element = feed(skip)
+            reservoir[randrange(k)] = element
+
+    shuffle(reservoir)
+    return reservoir
+
+
+def sample(iterable, k, weights=None, *, counts=None, strict=False):
     """Return a *k*-length list of elements chosen (without replacement)
-    from the *iterable*. Like :func:`random.sample`, but works on iterables
-    of unknown length.
+    from the *iterable*. Similar to :func:`random.sample`, but works on
+    iterables of unknown length.
 
     >>> iterable = range(100)
     >>> sample(iterable, 5)  # doctest: +SKIP
     [81, 60, 96, 16, 4]
 
-    An iterable with *weights* may also be given:
+    For iterables with repeated elements, you may supply *counts* to
+    indicate the repeats.
+
+    >>> iterable = ['a', 'b']
+    >>> counts = [3, 4]  # Equivalent to 'a', 'a', 'a', 'b', 'b', 'b', 'b'
+    >>> sample(iterable, k=3, counts=counts)  # doctest: +SKIP
+    ['a', 'a', 'b']
+
+    An iterable with *weights* may be given:
 
     >>> iterable = range(100)
     >>> weights = (i * i + 1 for i in range(100))
     >>> sampled = sample(iterable, 5, weights=weights)  # doctest: +SKIP
     [79, 67, 74, 66, 78]
 
-    The algorithm can also be used to generate weighted random permutations.
-    The relative weight of each item determines the probability that it
-    appears late in the permutation.
-
-    >>> data = "abcdefgh"
-    >>> weights = range(1, len(data) + 1)
-    >>> sample(data, k=len(data), weights=weights)  # doctest: +SKIP
-    ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
+    Weighted selections are made without replacement.
+    After an element is selected, it is removed from the pool and the
+    relative weights of the other elements increase (this
+    does not match the behavior of :func:`random.sample`'s *counts*
+    parameter). Note that *weights* may not be used with *counts*.
+
+    If the length of *iterable* is less than *k*,
+    ``ValueError`` is raised if *strict* is ``True`` and
+    all elements are returned (in shuffled order) if *strict* is ``False``.
+
+    By default, the `Algorithm L <https://w.wiki/ANrM>`__ reservoir sampling
+    technique is used. When *weights* are provided,
+    `Algorithm A-ExpJ <https://w.wiki/ANrS>`__ is used.
     """
+    iterator = iter(iterable)
+
+    if k < 0:
+        raise ValueError('k must be non-negative')
+
     if k == 0:
         return []
 
-    iterable = iter(iterable)
-    if weights is None:
-        return _sample_unweighted(iterable, k)
-    else:
+    if weights is not None and counts is not None:
+        raise TypeError('weights and counts are mutally exclusive')
+
+    elif weights is not None:
         weights = iter(weights)
-        return _sample_weighted(iterable, k, weights)
+        return _sample_weighted(iterator, k, weights, strict)
+
+    elif counts is not None:
+        counts = iter(counts)
+        return _sample_counted(iterator, k, counts, strict)
+
+    else:
+        return _sample_unweighted(iterator, k, strict)
 
 
 def is_sorted(iterable, key=None, reverse=False, strict=False):
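
What the new *counts* parameter means: sampling ``['a', 'b']`` with ``counts=[3, 4]`` behaves like sampling from the expanded population below, except that the vendored code never materializes the expansion. A stdlib illustration of that equivalence:

    import random
    from itertools import chain, repeat

    population = ['a', 'b']
    counts = [3, 4]
    expanded = list(chain.from_iterable(repeat(x, n) for x, n in zip(population, counts)))
    print(expanded)                    # -> ['a', 'a', 'a', 'b', 'b', 'b', 'b']
    print(random.sample(expanded, 3))  # e.g. ['a', 'b', 'b']
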
@@ -3650,12 +3820,16 @@ def is_sorted(iterable, key=None, reverse=False, strict=False):
     False
 
     The function returns ``False`` after encountering the first out-of-order
-    item. If there are no out-of-order items, the iterable is exhausted.
+    item, which means it may produce results that differ from the built-in
+    :func:`sorted` function for objects with unusual comparison dynamics.
+    If there are no out-of-order items, the iterable is exhausted.
     """
-    compare = (le if reverse else ge) if strict else (lt if reverse else gt)
-    it = iterable if key is None else map(key, iterable)
+    compare = le if strict else lt
+    it = iterable if (key is None) else map(key, iterable)
+    it_1, it_2 = tee(it)
+    next(it_2 if reverse else it_1, None)
 
-    return not any(starmap(compare, pairwise(it)))
+    return not any(map(compare, it_1, it_2))
 
 
 class AbortThread(BaseException):
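
The new ``is_sorted`` body picks a single comparison operator up front (``lt``, or ``le`` when *strict*) and compares each element with its successor, handling *reverse* by choosing which tee'd copy to advance. The replacement body, reproduced as a runnable standalone function:

    from itertools import tee
    from operator import le, lt

    def is_sorted(iterable, key=None, reverse=False, strict=False):
        compare = le if strict else lt
        it = iterable if key is None else map(key, iterable)
        it_1, it_2 = tee(it)
        next(it_2 if reverse else it_1, None)  # offset one copy by a single element
        return not any(map(compare, it_1, it_2))

    print(is_sorted([1, 2, 2, 3]))               # -> True
    print(is_sorted([1, 2, 2, 3], strict=True))  # -> False
    print(is_sorted([3, 2, 1], reverse=True))    # -> True
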
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+import sys
+
 from types import TracebackType
 from typing import (
     Any,
@@ -28,6 +30,9 @@ from typing_extensions import Protocol
 _T = TypeVar('_T')
 _T1 = TypeVar('_T1')
 _T2 = TypeVar('_T2')
+_T3 = TypeVar('_T3')
+_T4 = TypeVar('_T4')
+_T5 = TypeVar('_T5')
 _U = TypeVar('_U')
 _V = TypeVar('_V')
 _W = TypeVar('_W')
@@ -35,6 +40,12 @@ _T_co = TypeVar('_T_co', covariant=True)
 _GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
 _Raisable = BaseException | Type[BaseException]
 
+# The type of isinstance's second argument (from typeshed builtins)
+if sys.version_info >= (3, 10):
+    _ClassInfo = type | UnionType | tuple[_ClassInfo, ...]
+else:
+    _ClassInfo = type | tuple[_ClassInfo, ...]
+
 @type_check_only
 class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
@@ -135,7 +146,7 @@ def interleave_evenly(
 ) -> Iterator[_T]: ...
 def collapse(
     iterable: Iterable[Any],
-    base_type: type | None = ...,
+    base_type: _ClassInfo | None = ...,
     levels: int | None = ...,
 ) -> Iterator[Any]: ...
 @overload
@@ -213,6 +224,7 @@ def stagger(
 class UnequalIterablesError(ValueError):
     def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ...
 
+# zip_equal
 @overload
 def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ...
 @overload
@@ -221,11 +233,35 @@ def zip_equal(
 ) -> Iterator[tuple[_T1, _T2]]: ...
 @overload
 def zip_equal(
-    __iter1: Iterable[_T],
-    __iter2: Iterable[_T],
-    __iter3: Iterable[_T],
-    *iterables: Iterable[_T],
-) -> Iterator[tuple[_T, ...]]: ...
+    __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
+) -> Iterator[tuple[_T1, _T2, _T3]]: ...
+@overload
+def zip_equal(
+    __iter1: Iterable[_T1],
+    __iter2: Iterable[_T2],
+    __iter3: Iterable[_T3],
+    __iter4: Iterable[_T4],
+) -> Iterator[tuple[_T1, _T2, _T3, _T4]]: ...
+@overload
+def zip_equal(
+    __iter1: Iterable[_T1],
+    __iter2: Iterable[_T2],
+    __iter3: Iterable[_T3],
+    __iter4: Iterable[_T4],
+    __iter5: Iterable[_T5],
+) -> Iterator[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
+@overload
+def zip_equal(
+    __iter1: Iterable[Any],
+    __iter2: Iterable[Any],
+    __iter3: Iterable[Any],
+    __iter4: Iterable[Any],
+    __iter5: Iterable[Any],
+    __iter6: Iterable[Any],
+    *iterables: Iterable[Any],
+) -> Iterator[tuple[Any, ...]]: ...
+
+# zip_offset
 @overload
 def zip_offset(
     __iter1: Iterable[_T1],
@@ -285,12 +321,13 @@ def sort_together(
     key_list: Iterable[int] = ...,
     key: Callable[..., Any] | None = ...,
     reverse: bool = ...,
+    strict: bool = ...,
 ) -> list[tuple[_T, ...]]: ...
 def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ...
 def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
 def always_iterable(
     obj: object,
-    base_type: type | tuple[type | tuple[Any, ...], ...] | None = ...,
+    base_type: _ClassInfo | None = ...,
 ) -> Iterator[Any]: ...
 def adjacent(
     predicate: Callable[[_T], bool],
@@ -454,7 +491,9 @@ class run_length:
 def exactly_n(
     iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
 ) -> bool: ...
-def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ...
+def circular_shifts(
+    iterable: Iterable[_T], steps: int = 1
+) -> list[tuple[_T, ...]]: ...
 def make_decorator(
     wrapping_func: Callable[..., _U], result_index: int = ...
 ) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
@@ -500,7 +539,10 @@ def replace(
 ) -> Iterator[_T | _U]: ...
 def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ...
 def set_partitions(
-    iterable: Iterable[_T], k: int | None = ...
+    iterable: Iterable[_T],
+    k: int | None = ...,
+    min_size: int | None = ...,
+    max_size: int | None = ...,
 ) -> Iterator[list[list[_T]]]: ...
 
 class time_limited(Generic[_T], Iterator[_T]):
@@ -538,10 +580,22 @@ def map_if(
     func: Callable[[Any], Any],
     func_else: Callable[[Any], Any] | None = ...,
 ) -> Iterator[Any]: ...
+def _sample_unweighted(
+    iterator: Iterator[_T], k: int, strict: bool
+) -> list[_T]: ...
+def _sample_counted(
+    population: Iterator[_T], k: int, counts: Iterable[int], strict: bool
+) -> list[_T]: ...
+def _sample_weighted(
+    iterator: Iterator[_T], k: int, weights, strict
+) -> list[_T]: ...
 def sample(
     iterable: Iterable[_T],
     k: int,
     weights: Iterable[float] | None = ...,
+    *,
+    counts: Iterable[int] | None = ...,
+    strict: bool = False,
 ) -> list[_T]: ...
 def is_sorted(
     iterable: Iterable[_T],
@@ -577,7 +631,7 @@ class callback_iter(Generic[_T], Iterator[_T]):
 
 def windowed_complete(
     iterable: Iterable[_T], n: int
-) -> Iterator[tuple[_T, ...]]: ...
+) -> Iterator[tuple[tuple[_T, ...], tuple[_T, ...], tuple[_T, ...]]]: ...
 def all_unique(
     iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> bool: ...
@@ -608,9 +662,61 @@ class countable(Generic[_T], Iterator[_T]):
     items_seen: int
 
 def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
+@overload
+def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    *,
+    scalar_types: _ClassInfo | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+@overload
+def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    __obj2: _T | Iterable[_T],
+    *,
+    scalar_types: _ClassInfo | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+@overload
+def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    __obj2: _T | Iterable[_T],
+    __obj3: _T | Iterable[_T],
+    *,
+    scalar_types: _ClassInfo | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+@overload
+def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    __obj2: _T | Iterable[_T],
+    __obj3: _T | Iterable[_T],
+    __obj4: _T | Iterable[_T],
+    *,
+    scalar_types: _ClassInfo | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+@overload
+def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    __obj2: _T | Iterable[_T],
+    __obj3: _T | Iterable[_T],
+    __obj4: _T | Iterable[_T],
+    __obj5: _T | Iterable[_T],
+    *,
+    scalar_types: _ClassInfo | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+@overload
 def zip_broadcast(
+    __obj1: _T | Iterable[_T],
+    __obj2: _T | Iterable[_T],
+    __obj3: _T | Iterable[_T],
+    __obj4: _T | Iterable[_T],
+    __obj5: _T | Iterable[_T],
+    __obj6: _T | Iterable[_T],
     *objects: _T | Iterable[_T],
-    scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ...,
+    scalar_types: _ClassInfo | None = ...,
     strict: bool = ...,
 ) -> Iterable[tuple[_T, ...]]: ...
 def unique_in_window(
@@ -795,8 +795,30 @@ def triplewise(iterable):
     [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
 
     """
-    for (a, _), (b, c) in pairwise(pairwise(iterable)):
-        yield a, b, c
+    # This deviates from the itertools documentation reciple - see
+    # https://github.com/more-itertools/more-itertools/issues/889
+    t1, t2, t3 = tee(iterable, 3)
+    next(t3, None)
+    next(t3, None)
+    next(t2, None)
+    return zip(t1, t2, t3)
+
+
+def _sliding_window_islice(iterable, n):
+    # Fast path for small, non-zero values of n.
+    iterators = tee(iterable, n)
+    for i, iterator in enumerate(iterators):
+        next(islice(iterator, i, i), None)
+    return zip(*iterators)
+
+
+def _sliding_window_deque(iterable, n):
+    # Normal path for other values of n.
+    it = iter(iterable)
+    window = deque(islice(it, n - 1), maxlen=n)
+    for x in it:
+        window.append(x)
+        yield tuple(window)
 
 
 def sliding_window(iterable, n):
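
The new ``triplewise`` tees the input three ways and advances the copies by 0, 1 and 2 elements instead of nesting ``pairwise``. A standalone check:

    from itertools import tee

    def triplewise(iterable):
        t1, t2, t3 = tee(iterable, 3)
        next(t3, None)
        next(t3, None)
        next(t2, None)
        return zip(t1, t2, t3)

    print(list(triplewise('ABCDE')))
    # -> [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
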
@@ -812,11 +834,16 @@ def sliding_window(iterable, n):
 
     For a variant with more features, see :func:`windowed`.
     """
-    it = iter(iterable)
-    window = deque(islice(it, n - 1), maxlen=n)
-    for x in it:
-        window.append(x)
-        yield tuple(window)
+    if n > 20:
+        return _sliding_window_deque(iterable, n)
+    elif n > 2:
+        return _sliding_window_islice(iterable, n)
+    elif n == 2:
+        return pairwise(iterable)
+    elif n == 1:
+        return zip(iterable)
+    else:
+        raise ValueError(f'n should be at least one, not {n}')
 
 
 def subslices(iterable):
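
``sliding_window`` now dispatches on *n*: ``pairwise`` for ``n == 2``, the tee/``islice`` fast path for small *n*, and the original deque windowing for ``n > 20``. The fast path on its own, as a runnable sketch:

    from itertools import islice, tee

    def sliding_window_islice(iterable, n):
        iterators = tee(iterable, n)
        for i, iterator in enumerate(iterators):
            next(islice(iterator, i, i), None)  # advance the i-th copy by i elements
        return zip(*iterators)

    print(list(sliding_window_islice(range(6), 3)))
    # -> [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)]
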
@@ -1038,9 +1065,6 @@ def totient(n):
     >>> totient(12)
     4
     """
-    # The itertools docs use unique_justseen instead of set; see
-    # https://github.com/more-itertools/more-itertools/issues/823
-    for p in set(factor(n)):
-        n = n // p * (p - 1)
-
+    for prime in set(factor(n)):
+        n -= n // prime
     return n
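
The new ``totient`` body applies Euler's product formula in integer arithmetic: for each distinct prime factor *p*, subtracting ``n // p`` multiplies the running value by ``(1 - 1/p)``. A standalone sketch with a naive trial-division factorizer standing in for the library's ``factor``:

    def factor(n):
        # Naive trial division; the vendored library has its own factor().
        d = 2
        while d * d <= n:
            while n % d == 0:
                yield d
                n //= d
            d += 1
        if n > 1:
            yield n

    def totient(n):
        for prime in set(factor(n)):
            n -= n // prime
        return n

    print(totient(12))  # -> 4
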
@@ -10,6 +10,9 @@ from numbers import Number
 from typing import Union, Tuple, Iterable
 from typing import cast
 
+import dateutil.parser
+import dateutil.tz
+
 
 # some useful constants
 osc_per_year = 290_091_329_207_984_000
@@ -611,3 +614,40 @@ def date_range(start=None, stop=None, step=None):
     while start < stop:
         yield start
         start += step
+
+
+tzinfos = dict(
+    AEST=dateutil.tz.gettz("Australia/Sydney"),
+    AEDT=dateutil.tz.gettz("Australia/Sydney"),
+    ACST=dateutil.tz.gettz("Australia/Darwin"),
+    ACDT=dateutil.tz.gettz("Australia/Adelaide"),
+    AWST=dateutil.tz.gettz("Australia/Perth"),
+    EST=dateutil.tz.gettz("America/New_York"),
+    EDT=dateutil.tz.gettz("America/New_York"),
+    CST=dateutil.tz.gettz("America/Chicago"),
+    CDT=dateutil.tz.gettz("America/Chicago"),
+    MST=dateutil.tz.gettz("America/Denver"),
+    MDT=dateutil.tz.gettz("America/Denver"),
+    PST=dateutil.tz.gettz("America/Los_Angeles"),
+    PDT=dateutil.tz.gettz("America/Los_Angeles"),
+    GMT=dateutil.tz.gettz("Etc/GMT"),
+    UTC=dateutil.tz.gettz("UTC"),
+    CET=dateutil.tz.gettz("Europe/Berlin"),
+    CEST=dateutil.tz.gettz("Europe/Berlin"),
+    IST=dateutil.tz.gettz("Asia/Kolkata"),
+    BST=dateutil.tz.gettz("Europe/London"),
+    MSK=dateutil.tz.gettz("Europe/Moscow"),
+    EET=dateutil.tz.gettz("Europe/Helsinki"),
+    EEST=dateutil.tz.gettz("Europe/Helsinki"),
+    # Add more mappings as needed
+)
+
+
+def parse(*args, **kwargs):
+    """
+    Parse the input using dateutil.parser.parse with friendly tz support.
+
+    >>> parse('2024-07-26 12:59:00 EDT')
+    datetime.datetime(...America/New_York...)
+    """
+    return dateutil.parser.parse(*args, tzinfos=tzinfos, **kwargs)
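
The new ``parse`` helper added in this hunk simply forwards to ``dateutil.parser.parse`` with the abbreviation-to-zone mapping above, so strings like ``'... EDT'`` resolve to a concrete zone. A standalone equivalent using the same dateutil API (requires ``python-dateutil``; the tzinfo repr shown is indicative):

    import dateutil.parser
    import dateutil.tz

    tzinfos = {'EDT': dateutil.tz.gettz('America/New_York')}
    dt = dateutil.parser.parse('2024-07-26 12:59:00 EDT', tzinfos=tzinfos)
    print(dt)         # -> 2024-07-26 12:59:00-04:00
    print(dt.tzinfo)  # e.g. tzfile('.../America/New_York')
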
@@ -39,7 +39,7 @@ requests-oauthlib==2.0.0
 rumps==0.4.0; platform_system == "Darwin"
 simplejson==3.19.2
 six==1.16.0
-tempora==5.6.0
+tempora==5.7.0
 tokenize-rt==5.2.0
 tzdata==2024.1
 tzlocal==5.0.1