Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-13 01:56:58 -07:00)
Bump tempora from 5.6.0 to 5.7.0 (#2371)
* Bump tempora from 5.6.0 to 5.7.0

  Bumps [tempora](https://github.com/jaraco/tempora) from 5.6.0 to 5.7.0.
  - [Release notes](https://github.com/jaraco/tempora/releases)
  - [Changelog](https://github.com/jaraco/tempora/blob/main/NEWS.rst)
  - [Commits](https://github.com/jaraco/tempora/compare/v5.6.0...v5.7.0)

  ---
  updated-dependencies:
  - dependency-name: tempora
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot] <support@github.com>

* Update tempora==5.7.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
[skip ci]
This commit is contained in: parent 2e6f541ec2, commit 43cb027592
7 changed files with 453 additions and 101 deletions
@@ -14,6 +14,14 @@ def compose(*funcs):
    """
    Compose any number of unary functions into a single unary function.

    Comparable to
    `function composition <https://en.wikipedia.org/wiki/Function_composition>`_
    in mathematics:

    ``h = g ∘ f`` implies ``h(x) = g(f(x))``.

    In Python, ``h = compose(g, f)``.

    >>> import textwrap
    >>> expected = str.strip(textwrap.dedent(compose.__doc__))
    >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
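
A minimal usage sketch of the composition order documented in the hunk above, assuming this hunk comes from the bundled jaraco.functools module (the import path and names below are illustrative, not part of the diff):

```python
from jaraco.functools import compose

# compose(g, f)(x) == g(f(x)): the right-most function is applied first.
shout = compose(str.upper, str.strip)
assert shout("  hello  ") == "HELLO"
```
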
@@ -3,4 +3,4 @@
from .more import * # noqa
from .recipes import * # noqa

__version__ = '10.3.0'
__version__ = '10.4.0'
@@ -3,8 +3,9 @@ import warnings

from collections import Counter, defaultdict, deque, abc
from collections.abc import Sequence
from contextlib import suppress
from functools import cached_property, partial, reduce, wraps
from heapq import heapify, heapreplace, heappop
from heapq import heapify, heapreplace
from itertools import (
    chain,
    combinations,
@@ -21,10 +22,10 @@ from itertools import (
    zip_longest,
    product,
)
from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
from math import comb, e, exp, factorial, floor, fsum, log, log1p, perm, tau
from queue import Empty, Queue
from random import random, randrange, uniform
from operator import itemgetter, mul, sub, gt, lt, ge, le
from random import random, randrange, shuffle, uniform
from operator import itemgetter, mul, sub, gt, lt, le
from sys import hexversion, maxsize
from time import monotonic

@@ -34,7 +35,6 @@ from .recipes import (
    UnequalIterablesError,
    consume,
    flatten,
    pairwise,
    powerset,
    take,
    unique_everseen,
@@ -473,12 +473,10 @@ def ilen(iterable):
    This consumes the iterable, so handle with care.

    """
    # This approach was selected because benchmarks showed it's likely the
    # fastest of the known implementations at the time of writing.
    # See GitHub tracker: #236, #230.
    counter = count()
    deque(zip(iterable, counter), maxlen=0)
    return next(counter)
    # This is the "most beautiful of the fast variants" of this function.
    # If you think you can improve on it, please ensure that your version
    # is both 10x faster and 10x more beautiful.
    return sum(compress(repeat(1), zip(iterable)))


def iterate(func, start):
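
Why the new one-liner counts correctly: ``zip(iterable)`` yields one 1-tuple per element, every tuple is truthy, so ``compress(repeat(1), ...)`` lets one ``1`` through per element and ``sum`` adds them. A standalone sketch of the same idea (not part of the diff):

```python
from itertools import compress, repeat

def ilen_sketch(iterable):
    # One truthy 1-tuple per element selects one `1` from repeat(1);
    # summing those ones counts the elements without building a list.
    return sum(compress(repeat(1), zip(iterable)))

assert ilen_sketch(x for x in range(1000) if x % 3 == 0) == 334
```
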
@@ -666,9 +664,9 @@ def distinct_permutations(iterable, r=None):
    >>> sorted(distinct_permutations([1, 0, 1]))
    [(0, 1, 1), (1, 0, 1), (1, 1, 0)]

    Equivalent to ``set(permutations(iterable))``, except duplicates are not
    generated and thrown away. For larger input sequences this is much more
    efficient.
    Equivalent to yielding from ``set(permutations(iterable))``, except
    duplicates are not generated and thrown away. For larger input sequences
    this is much more efficient.

    Duplicate permutations arise when there are duplicated elements in the
    input iterable. The number of items returned is
@@ -683,6 +681,25 @@ def distinct_permutations(iterable, r=None):
    >>> sorted(distinct_permutations(range(3), r=2))
    [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]

    *iterable* need not be sortable, but note that using equal (``x == y``)
    but non-identical (``id(x) != id(y)``) elements may produce surprising
    behavior. For example, ``1`` and ``True`` are equal but non-identical:

    >>> list(distinct_permutations([1, True, '3'])) # doctest: +SKIP
    [
        (1, True, '3'),
        (1, '3', True),
        ('3', 1, True)
    ]
    >>> list(distinct_permutations([1, 2, '3'])) # doctest: +SKIP
    [
        (1, 2, '3'),
        (1, '3', 2),
        (2, 1, '3'),
        (2, '3', 1),
        ('3', 1, 2),
        ('3', 2, 1)
    ]
    """

    # Algorithm: https://w.wiki/Qai
@@ -749,14 +766,44 @@ def distinct_permutations(iterable, r=None):
            i += 1
            head[i:], tail[:] = tail[: r - i], tail[r - i :]

    items = sorted(iterable)
    items = list(iterable)

    try:
        items.sort()
        sortable = True
    except TypeError:
        sortable = False

        indices_dict = defaultdict(list)

        for item in items:
            indices_dict[items.index(item)].append(item)

        indices = [items.index(item) for item in items]
        indices.sort()

        equivalent_items = {k: cycle(v) for k, v in indices_dict.items()}

        def permuted_items(permuted_indices):
            return tuple(
                next(equivalent_items[index]) for index in permuted_indices
            )

    size = len(items)
    if r is None:
        r = size

    # functools.partial(_partial, ... )
    algorithm = _full if (r == size) else partial(_partial, r=r)

    if 0 < r <= size:
        return _full(items) if (r == size) else _partial(items, r)
        if sortable:
            return algorithm(items)
        else:
            return (
                permuted_items(permuted_indices)
                for permuted_indices in algorithm(indices)
            )

    return iter(() if r else ((),))
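
The interesting case in the hunk above is unsortable input: the items stay in list order, the permutation algorithm runs over their indices, and `permuted_items` maps each index back to one of the equal items via a `cycle`. A quick check of that fallback path (assuming the bundled package imports as `more_itertools`):

```python
from more_itertools import distinct_permutations

# Mixed types cannot be sorted, but permuting index positions still works.
mixed = [1, 2, '3']
perms = list(distinct_permutations(mixed))
assert len(perms) == 6  # 3! orderings, and no TypeError is raised
```
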
@@ -1743,7 +1790,9 @@ def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
    return zip(*staggered)


def sort_together(iterables, key_list=(0,), key=None, reverse=False):
def sort_together(
    iterables, key_list=(0,), key=None, reverse=False, strict=False
):
    """Return the input iterables sorted together, with *key_list* as the
    priority for sorting. All iterables are trimmed to the length of the
    shortest one.
@@ -1782,6 +1831,10 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
    >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
    [(3, 2, 1), ('a', 'b', 'c')]

    If the *strict* keyword argument is ``True``, then
    ``UnequalIterablesError`` will be raised if any of the iterables have
    different lengths.

    """
    if key is None:
        # if there is no key function, the key argument to sorted is an
@@ -1804,8 +1857,9 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
            *get_key_items(zipped_items)
        )

    zipper = zip_equal if strict else zip
    return list(
        zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
        zipper(*sorted(zipper(*iterables), key=key_argument, reverse=reverse))
    )


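
With `strict=True` the new code swaps `zip` for `zip_equal`, so mismatched lengths surface as an error instead of being silently trimmed. A usage sketch (import path assumed to be the bundled `more_itertools`):

```python
from more_itertools import UnequalIterablesError, sort_together

sizes, names = sort_together([[3, 1, 2], ['c', 'a', 'b']])
assert names == ('a', 'b', 'c')       # both iterables reordered together

try:
    sort_together([[1, 2, 3], ['a', 'b']], strict=True)
except UnequalIterablesError:
    pass                               # lengths differ, so strict mode raises
```
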
@@ -2747,8 +2801,6 @@ class seekable:
    >>> it.seek(0)
    >>> next(it), next(it), next(it)
    ('0', '1', '2')
    >>> next(it)
    '3'

    You can also seek forward:

@@ -2756,15 +2808,29 @@
    >>> it.seek(10)
    >>> next(it)
    '10'
    >>> it.relative_seek(-2) # Seeking relative to the current position
    >>> next(it)
    '9'
    >>> it.seek(20) # Seeking past the end of the source isn't a problem
    >>> list(it)
    []
    >>> it.seek(0) # Resetting works even after hitting the end
    >>> next(it)
    '0'

    Call :meth:`relative_seek` to seek relative to the source iterator's
    current position.

    >>> it = seekable((str(n) for n in range(20)))
    >>> next(it), next(it), next(it)
    ('0', '1', '2')
    >>> it.relative_seek(2)
    >>> next(it)
    '5'
    >>> it.relative_seek(-3) # Source is at '6', we move back to '3'
    >>> next(it)
    '3'
    >>> it.relative_seek(-3) # Source is at '4', we move back to '1'
    >>> next(it)
    '1'


    Call :meth:`peek` to look ahead one item without advancing the iterator:

@@ -2873,8 +2939,10 @@ class seekable:
            consume(self, remainder)

    def relative_seek(self, count):
        index = len(self._cache)
        self.seek(max(index + count, 0))
        if self._index is None:
            self._index = len(self._cache)

        self.seek(max(self._index + count, 0))


class run_length:
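
The old body measured the offset from the end of the cache (everything buffered so far); the new body starts from `self._index`, the caller's actual position, falling back to the cache length only when nothing has been read back yet. A standalone sketch matching the new doctest:

```python
from more_itertools import seekable

it = seekable(str(n) for n in range(20))
next(it), next(it), next(it)   # consume '0', '1', '2'
it.relative_seek(2)            # move relative to where we are, not the cache
assert next(it) == '5'
```
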
@@ -2903,7 +2971,7 @@ class run_length:

    @staticmethod
    def decode(iterable):
        return chain.from_iterable(repeat(k, n) for k, n in iterable)
        return chain.from_iterable(starmap(repeat, iterable))


def exactly_n(iterable, n, predicate=bool):
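
`starmap(repeat, iterable)` unpacks each `(value, count)` pair into `repeat(value, count)`, so the new `decode` expands runs exactly like the old generator expression, just without a Python-level loop. A quick equivalence check:

```python
from itertools import chain, repeat, starmap

encoded = [('a', 3), ('b', 1)]
decoded = list(chain.from_iterable(starmap(repeat, encoded)))
assert decoded == ['a', 'a', 'a', 'b']
```
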
@@ -2924,14 +2992,34 @@ def exactly_n(iterable, n, predicate=bool):
    return len(take(n + 1, filter(predicate, iterable))) == n


def circular_shifts(iterable):
    """Return a list of circular shifts of *iterable*.
def circular_shifts(iterable, steps=1):
    """Yield the circular shifts of *iterable*.

    >>> circular_shifts(range(4))
    >>> list(circular_shifts(range(4)))
    [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]

    Set *steps* to the number of places to rotate to the left
    (or to the right if negative). Defaults to 1.

    >>> list(circular_shifts(range(4), 2))
    [(0, 1, 2, 3), (2, 3, 0, 1)]

    >>> list(circular_shifts(range(4), -1))
    [(0, 1, 2, 3), (3, 0, 1, 2), (2, 3, 0, 1), (1, 2, 3, 0)]

    """
    lst = list(iterable)
    return take(len(lst), windowed(cycle(lst), len(lst)))
    buffer = deque(iterable)
    if steps == 0:
        raise ValueError('Steps should be a non-zero integer')

    buffer.rotate(steps)
    steps = -steps
    n = len(buffer)
    n //= math.gcd(n, steps)

    for __ in repeat(None, n):
        buffer.rotate(steps)
        yield tuple(buffer)


def make_decorator(wrapping_func, result_index=0):
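
The generator pre-rotates the deque once so the first yield is the unshifted tuple, then rotates by the negated step each iteration. The number of distinct shifts is `len(buffer) // gcd(len(buffer), steps)`, which is what the `n //= math.gcd(n, steps)` line computes. For example, with four items and `steps=2` only two shifts are distinct:

```python
import math
from more_itertools import circular_shifts

assert math.gcd(4, 2) == 2                 # so only 4 // 2 == 2 distinct shifts
assert list(circular_shifts(range(4), 2)) == [(0, 1, 2, 3), (2, 3, 0, 1)]
```
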
@@ -3191,7 +3279,7 @@ def partitions(iterable):
        yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]


def set_partitions(iterable, k=None):
def set_partitions(iterable, k=None, min_size=None, max_size=None):
    """
    Yield the set partitions of *iterable* into *k* parts. Set partitions are
    not order-preserving.
@@ -3215,6 +3303,20 @@ def set_partitions(iterable, k=None):
    ['b', 'ac']
    ['a', 'b', 'c']

    if *min_size* and/or *max_size* are given, the minimum and/or maximum size
    per block in partition is set.

    >>> iterable = 'abc'
    >>> for part in set_partitions(iterable, min_size=2):
    ...     print([''.join(p) for p in part])
    ['abc']
    >>> for part in set_partitions(iterable, max_size=2):
    ...     print([''.join(p) for p in part])
    ['a', 'bc']
    ['ab', 'c']
    ['b', 'ac']
    ['a', 'b', 'c']

    """
    L = list(iterable)
    n = len(L)
@@ -3226,6 +3328,11 @@ def set_partitions(iterable, k=None):
        elif k > n:
            return

    min_size = min_size if min_size is not None else 0
    max_size = max_size if max_size is not None else n
    if min_size > max_size:
        return

    def set_partitions_helper(L, k):
        n = len(L)
        if k == 1:
@@ -3242,9 +3349,15 @@ def set_partitions(iterable, k=None):

    if k is None:
        for k in range(1, n + 1):
            yield from set_partitions_helper(L, k)
            yield from filter(
                lambda z: all(min_size <= len(bk) <= max_size for bk in z),
                set_partitions_helper(L, k),
            )
    else:
        yield from set_partitions_helper(L, k)
        yield from filter(
            lambda z: all(min_size <= len(bk) <= max_size for bk in z),
            set_partitions_helper(L, k),
        )


class time_limited:
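
The size limits are applied as a filter over the unrestricted partitions: a partition survives only if every block length falls inside `[min_size, max_size]`. A small check (bundled `more_itertools` import assumed):

```python
from more_itertools import set_partitions

# Keep only partitions of 'abcd' whose blocks all have exactly two elements.
pairs_only = list(set_partitions('abcd', min_size=2, max_size=2))
assert len(pairs_only) == 3                          # ab|cd, ac|bd, ad|bc
assert all(len(block) == 2 for part in pairs_only for block in part)
```
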
@@ -3535,32 +3648,27 @@ def map_if(iterable, pred, func, func_else=lambda x: x):
        yield func(item) if pred(item) else func_else(item)


def _sample_unweighted(iterable, k):
    # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
def _sample_unweighted(iterator, k, strict):
    # Algorithm L in the 1994 paper by Kim-Hung Li:
    # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".

    # Fill up the reservoir (collection of samples) with the first `k` samples
    reservoir = take(k, iterable)
    reservoir = list(islice(iterator, k))
    if strict and len(reservoir) < k:
        raise ValueError('Sample larger than population')
    W = 1.0

    # Generate random number that's the largest in a sample of k U(0,1) numbers
    # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
    W = exp(log(random()) / k)

    # The number of elements to skip before changing the reservoir is a random
    # number with a geometric distribution. Sample it using random() and logs.
    next_index = k + floor(log(random()) / log(1 - W))

    for index, element in enumerate(iterable, k):
        if index == next_index:
            reservoir[randrange(k)] = element
            # The new W is the largest in a sample of k U(0, `old_W`) numbers
    with suppress(StopIteration):
        while True:
            W *= exp(log(random()) / k)
            next_index += floor(log(random()) / log(1 - W)) + 1
            skip = floor(log(random()) / log1p(-W))
            element = next(islice(iterator, skip, None))
            reservoir[randrange(k)] = element

    shuffle(reservoir)
    return reservoir


def _sample_weighted(iterable, k, weights):
def _sample_weighted(iterator, k, weights, strict):
    # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
    # "Weighted random sampling with a reservoir".

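
In Algorithm L the reservoir is only touched occasionally: after each replacement the code draws how many elements to skip from a geometric distribution, using `floor(log(random()) / log1p(-W))`. `log1p(-W)` equals `log(1 - W)` but is more accurate when `W` is tiny, which matters on long streams. A sketch of just that draw (the helper name is mine, not from the diff):

```python
from math import floor, log, log1p
from random import random

def geometric_skip(w):
    # P(skip >= m) == (1 - w) ** m, so floor(log(U) / log(1 - w)) samples the
    # number of elements to pass over before the next reservoir replacement.
    return floor(log(random()) / log1p(-w))
```
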
@@ -3569,7 +3677,10 @@ def _sample_weighted(iterable, k, weights):

    # Fill up the reservoir (collection of samples) with the first `k`
    # weight-keys and elements, then heapify the list.
    reservoir = take(k, zip(weight_keys, iterable))
    reservoir = take(k, zip(weight_keys, iterator))
    if strict and len(reservoir) < k:
        raise ValueError('Sample larger than population')

    heapify(reservoir)

    # The number of jumps before changing the reservoir is a random variable
@@ -3577,7 +3688,7 @@ def _sample_weighted(iterable, k, weights):
    smallest_weight_key, _ = reservoir[0]
    weights_to_skip = log(random()) / smallest_weight_key

    for weight, element in zip(weights, iterable):
    for weight, element in zip(weights, iterator):
        if weight >= weights_to_skip:
            # The notation here is consistent with the paper, but we store
            # the weight-keys in log-space for better numerical stability.
@@ -3591,44 +3702,103 @@ def _sample_weighted(iterable, k, weights):
        else:
            weights_to_skip -= weight

    # Equivalent to [element for weight_key, element in sorted(reservoir)]
    return [heappop(reservoir)[1] for _ in range(k)]
    ret = [element for weight_key, element in reservoir]
    shuffle(ret)
    return ret


def sample(iterable, k, weights=None):
def _sample_counted(population, k, counts, strict):
    element = None
    remaining = 0

    def feed(i):
        # Advance *i* steps ahead and consume an element
        nonlocal element, remaining

        while i + 1 > remaining:
            i = i - remaining
            element = next(population)
            remaining = next(counts)
        remaining -= i + 1
        return element

    with suppress(StopIteration):
        reservoir = []
        for _ in range(k):
            reservoir.append(feed(0))
        if strict and len(reservoir) < k:
            raise ValueError('Sample larger than population')

        W = 1.0
        while True:
            W *= exp(log(random()) / k)
            skip = floor(log(random()) / log1p(-W))
            element = feed(skip)
            reservoir[randrange(k)] = element

    shuffle(reservoir)
    return reservoir


def sample(iterable, k, weights=None, *, counts=None, strict=False):
    """Return a *k*-length list of elements chosen (without replacement)
    from the *iterable*. Like :func:`random.sample`, but works on iterables
    of unknown length.
    from the *iterable*. Similar to :func:`random.sample`, but works on
    iterables of unknown length.

    >>> iterable = range(100)
    >>> sample(iterable, 5) # doctest: +SKIP
    [81, 60, 96, 16, 4]

    An iterable with *weights* may also be given:
    For iterables with repeated elements, you may supply *counts* to
    indicate the repeats.

    >>> iterable = ['a', 'b']
    >>> counts = [3, 4] # Equivalent to 'a', 'a', 'a', 'b', 'b', 'b', 'b'
    >>> sample(iterable, k=3, counts=counts) # doctest: +SKIP
    ['a', 'a', 'b']

    An iterable with *weights* may be given:

    >>> iterable = range(100)
    >>> weights = (i * i + 1 for i in range(100))
    >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP
    [79, 67, 74, 66, 78]

    The algorithm can also be used to generate weighted random permutations.
    The relative weight of each item determines the probability that it
    appears late in the permutation.
    Weighted selections are made without replacement.
    After an element is selected, it is removed from the pool and the
    relative weights of the other elements increase (this
    does not match the behavior of :func:`random.sample`'s *counts*
    parameter). Note that *weights* may not be used with *counts*.

    >>> data = "abcdefgh"
    >>> weights = range(1, len(data) + 1)
    >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP
    ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
    If the length of *iterable* is less than *k*,
    ``ValueError`` is raised if *strict* is ``True`` and
    all elements are returned (in shuffled order) if *strict* is ``False``.

    By default, the `Algorithm L <https://w.wiki/ANrM>`__ reservoir sampling
    technique is used. When *weights* are provided,
    `Algorithm A-ExpJ <https://w.wiki/ANrS>`__ is used.
    """
    iterator = iter(iterable)

    if k < 0:
        raise ValueError('k must be non-negative')

    if k == 0:
        return []

    iterable = iter(iterable)
    if weights is None:
        return _sample_unweighted(iterable, k)
    else:
    if weights is not None and counts is not None:
        raise TypeError('weights and counts are mutally exclusive')

    elif weights is not None:
        weights = iter(weights)
        return _sample_weighted(iterable, k, weights)
        return _sample_weighted(iterator, k, weights, strict)

    elif counts is not None:
        counts = iter(counts)
        return _sample_counted(iterator, k, counts, strict)

    else:
        return _sample_unweighted(iterator, k, strict)


def is_sorted(iterable, key=None, reverse=False, strict=False):
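
A usage sketch of the new keyword arguments (bundled `more_itertools` import assumed; the picked values are random, so only lengths are asserted):

```python
from more_itertools import sample

stream = (n for n in range(1_000) if n % 7 == 0)
picks = sample(stream, k=5)                  # single pass, Algorithm L
assert len(picks) == 5

letters = sample('ab', k=3, counts=[3, 4])   # population behaves like 'aaabbbb'
assert len(letters) == 3

short = sample(range(3), k=10)               # not strict: all 3 items, shuffled
assert sorted(short) == [0, 1, 2]
```
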
@@ -3650,12 +3820,16 @@ def is_sorted(iterable, key=None, reverse=False, strict=False):
    False

    The function returns ``False`` after encountering the first out-of-order
    item. If there are no out-of-order items, the iterable is exhausted.
    item, which means it may produce results that differ from the built-in
    :func:`sorted` function for objects with unusual comparison dynamics.
    If there are no out-of-order items, the iterable is exhausted.
    """
    compare = le if strict else lt
    it = iterable if (key is None) else map(key, iterable)
    it_1, it_2 = tee(it)
    next(it_2 if reverse else it_1, None)

    compare = (le if reverse else ge) if strict else (lt if reverse else gt)
    it = iterable if key is None else map(key, iterable)
    return not any(starmap(compare, pairwise(it)))
    return not any(map(compare, it_1, it_2))


class AbortThread(BaseException):
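
The rewrite replaces `starmap(compare, pairwise(it))` with two tee'd iterators offset by one element; which iterator is advanced depends on *reverse*, so only `le`/`lt` comparisons are needed. Observable behavior is unchanged for ordinary comparisons:

```python
from more_itertools import is_sorted

assert is_sorted([1, 2, 2, 3])
assert not is_sorted([1, 2, 2, 3], strict=True)   # equal neighbours fail strict
assert is_sorted([3, 2, 1], reverse=True)
```
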
@@ -2,6 +2,8 @@

from __future__ import annotations

import sys

from types import TracebackType
from typing import (
    Any,
@@ -28,6 +30,9 @@ from typing_extensions import Protocol
_T = TypeVar('_T')
_T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_T3 = TypeVar('_T3')
_T4 = TypeVar('_T4')
_T5 = TypeVar('_T5')
_U = TypeVar('_U')
_V = TypeVar('_V')
_W = TypeVar('_W')
@@ -35,6 +40,12 @@ _T_co = TypeVar('_T_co', covariant=True)
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
_Raisable = BaseException | Type[BaseException]

# The type of isinstance's second argument (from typeshed builtins)
if sys.version_info >= (3, 10):
    _ClassInfo = type | UnionType | tuple[_ClassInfo, ...]
else:
    _ClassInfo = type | tuple[_ClassInfo, ...]

@type_check_only
class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...

@@ -135,7 +146,7 @@ def interleave_evenly(
) -> Iterator[_T]: ...
def collapse(
    iterable: Iterable[Any],
    base_type: type | None = ...,
    base_type: _ClassInfo | None = ...,
    levels: int | None = ...,
) -> Iterator[Any]: ...
@overload
@@ -213,6 +224,7 @@ def stagger(
class UnequalIterablesError(ValueError):
    def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ...

# zip_equal
@overload
def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ...
@overload
@@ -221,11 +233,35 @@ def zip_equal(
) -> Iterator[tuple[_T1, _T2]]: ...
@overload
def zip_equal(
    __iter1: Iterable[_T],
    __iter2: Iterable[_T],
    __iter3: Iterable[_T],
    *iterables: Iterable[_T],
) -> Iterator[tuple[_T, ...]]: ...
    __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]
) -> Iterator[tuple[_T1, _T2, _T3]]: ...
@overload
def zip_equal(
    __iter1: Iterable[_T1],
    __iter2: Iterable[_T2],
    __iter3: Iterable[_T3],
    __iter4: Iterable[_T4],
) -> Iterator[tuple[_T1, _T2, _T3, _T4]]: ...
@overload
def zip_equal(
    __iter1: Iterable[_T1],
    __iter2: Iterable[_T2],
    __iter3: Iterable[_T3],
    __iter4: Iterable[_T4],
    __iter5: Iterable[_T5],
) -> Iterator[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
@overload
def zip_equal(
    __iter1: Iterable[Any],
    __iter2: Iterable[Any],
    __iter3: Iterable[Any],
    __iter4: Iterable[Any],
    __iter5: Iterable[Any],
    __iter6: Iterable[Any],
    *iterables: Iterable[Any],
) -> Iterator[tuple[Any, ...]]: ...

# zip_offset
@overload
def zip_offset(
    __iter1: Iterable[_T1],
@@ -285,12 +321,13 @@ def sort_together(
    key_list: Iterable[int] = ...,
    key: Callable[..., Any] | None = ...,
    reverse: bool = ...,
    strict: bool = ...,
) -> list[tuple[_T, ...]]: ...
def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ...
def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
def always_iterable(
    obj: object,
    base_type: type | tuple[type | tuple[Any, ...], ...] | None = ...,
    base_type: _ClassInfo | None = ...,
) -> Iterator[Any]: ...
def adjacent(
    predicate: Callable[[_T], bool],
@@ -454,7 +491,9 @@ class run_length:
def exactly_n(
    iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
) -> bool: ...
def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ...
def circular_shifts(
    iterable: Iterable[_T], steps: int = 1
) -> list[tuple[_T, ...]]: ...
def make_decorator(
    wrapping_func: Callable[..., _U], result_index: int = ...
) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
@@ -500,7 +539,10 @@ def replace(
) -> Iterator[_T | _U]: ...
def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ...
def set_partitions(
    iterable: Iterable[_T], k: int | None = ...
    iterable: Iterable[_T],
    k: int | None = ...,
    min_size: int | None = ...,
    max_size: int | None = ...,
) -> Iterator[list[list[_T]]]: ...

class time_limited(Generic[_T], Iterator[_T]):
@@ -538,10 +580,22 @@ def map_if(
    func: Callable[[Any], Any],
    func_else: Callable[[Any], Any] | None = ...,
) -> Iterator[Any]: ...
def _sample_unweighted(
    iterator: Iterator[_T], k: int, strict: bool
) -> list[_T]: ...
def _sample_counted(
    population: Iterator[_T], k: int, counts: Iterable[int], strict: bool
) -> list[_T]: ...
def _sample_weighted(
    iterator: Iterator[_T], k: int, weights, strict
) -> list[_T]: ...
def sample(
    iterable: Iterable[_T],
    k: int,
    weights: Iterable[float] | None = ...,
    *,
    counts: Iterable[int] | None = ...,
    strict: bool = False,
) -> list[_T]: ...
def is_sorted(
    iterable: Iterable[_T],
@@ -577,7 +631,7 @@ class callback_iter(Generic[_T], Iterator[_T]):

def windowed_complete(
    iterable: Iterable[_T], n: int
) -> Iterator[tuple[_T, ...]]: ...
) -> Iterator[tuple[tuple[_T, ...], tuple[_T, ...], tuple[_T, ...]]]: ...
def all_unique(
    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
) -> bool: ...
@@ -608,9 +662,61 @@ class countable(Generic[_T], Iterator[_T]):
    items_seen: int

def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    *,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    __obj2: _T | Iterable[_T],
    *,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    __obj2: _T | Iterable[_T],
    __obj3: _T | Iterable[_T],
    *,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    __obj2: _T | Iterable[_T],
    __obj3: _T | Iterable[_T],
    __obj4: _T | Iterable[_T],
    *,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    __obj2: _T | Iterable[_T],
    __obj3: _T | Iterable[_T],
    __obj4: _T | Iterable[_T],
    __obj5: _T | Iterable[_T],
    *,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
@overload
def zip_broadcast(
    __obj1: _T | Iterable[_T],
    __obj2: _T | Iterable[_T],
    __obj3: _T | Iterable[_T],
    __obj4: _T | Iterable[_T],
    __obj5: _T | Iterable[_T],
    __obj6: _T | Iterable[_T],
    *objects: _T | Iterable[_T],
    scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ...,
    scalar_types: _ClassInfo | None = ...,
    strict: bool = ...,
) -> Iterable[tuple[_T, ...]]: ...
def unique_in_window(
@@ -795,8 +795,30 @@ def triplewise(iterable):
    [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]

    """
    for (a, _), (b, c) in pairwise(pairwise(iterable)):
        yield a, b, c
    # This deviates from the itertools documentation recipe - see
    # https://github.com/more-itertools/more-itertools/issues/889
    t1, t2, t3 = tee(iterable, 3)
    next(t3, None)
    next(t3, None)
    next(t2, None)
    return zip(t1, t2, t3)


def _sliding_window_islice(iterable, n):
    # Fast path for small, non-zero values of n.
    iterators = tee(iterable, n)
    for i, iterator in enumerate(iterators):
        next(islice(iterator, i, i), None)
    return zip(*iterators)


def _sliding_window_deque(iterable, n):
    # Normal path for other values of n.
    it = iter(iterable)
    window = deque(islice(it, n - 1), maxlen=n)
    for x in it:
        window.append(x)
        yield tuple(window)


def sliding_window(iterable, n):
@@ -812,11 +834,16 @@ def sliding_window(iterable, n):

    For a variant with more features, see :func:`windowed`.
    """
    it = iter(iterable)
    window = deque(islice(it, n - 1), maxlen=n)
    for x in it:
        window.append(x)
        yield tuple(window)
    if n > 20:
        return _sliding_window_deque(iterable, n)
    elif n > 2:
        return _sliding_window_islice(iterable, n)
    elif n == 2:
        return pairwise(iterable)
    elif n == 1:
        return zip(iterable)
    else:
        raise ValueError(f'n should be at least one, not {n}')


def subslices(iterable):
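
`sliding_window` is now a dispatcher: a tee/islice fast path for small windows, the original deque loop for large ones, and direct delegation to `pairwise`/`zip` for `n == 2` and `n == 1`. The observable output is the same for every branch:

```python
from more_itertools import sliding_window

assert list(sliding_window('ABCDE', 3)) == [
    ('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')
]
# n == 1 falls through to zip(), which wraps each item in a 1-tuple.
assert list(sliding_window([1, 2, 3], 1)) == [(1,), (2,), (3,)]
```
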
@@ -1038,9 +1065,6 @@ def totient(n):
    >>> totient(12)
    4
    """
    # The itertools docs use unique_justseen instead of set; see
    # https://github.com/more-itertools/more-itertools/issues/823
    for p in set(factor(n)):
        n = n // p * (p - 1)

    for prime in set(factor(n)):
        n -= n // prime
    return n
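
Both versions compute Euler's totient via the identity phi(n) = n * prod(1 - 1/p) over the distinct prime factors p; the new body applies each factor as a single subtraction, `n -= n // p`. Worked by hand for n = 12 (primes 2 and 3):

```python
# phi(12): distinct prime factors are 2 and 3.
n = 12
for p in (2, 3):
    n -= n // p    # same as n = n // p * (p - 1) when p divides n
assert n == 4      # phi(12) == 4
```
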
@@ -10,6 +10,9 @@ from numbers import Number
from typing import Union, Tuple, Iterable
from typing import cast

import dateutil.parser
import dateutil.tz


# some useful constants
osc_per_year = 290_091_329_207_984_000
@@ -611,3 +614,40 @@ def date_range(start=None, stop=None, step=None):
    while start < stop:
        yield start
        start += step


tzinfos = dict(
    AEST=dateutil.tz.gettz("Australia/Sydney"),
    AEDT=dateutil.tz.gettz("Australia/Sydney"),
    ACST=dateutil.tz.gettz("Australia/Darwin"),
    ACDT=dateutil.tz.gettz("Australia/Adelaide"),
    AWST=dateutil.tz.gettz("Australia/Perth"),
    EST=dateutil.tz.gettz("America/New_York"),
    EDT=dateutil.tz.gettz("America/New_York"),
    CST=dateutil.tz.gettz("America/Chicago"),
    CDT=dateutil.tz.gettz("America/Chicago"),
    MST=dateutil.tz.gettz("America/Denver"),
    MDT=dateutil.tz.gettz("America/Denver"),
    PST=dateutil.tz.gettz("America/Los_Angeles"),
    PDT=dateutil.tz.gettz("America/Los_Angeles"),
    GMT=dateutil.tz.gettz("Etc/GMT"),
    UTC=dateutil.tz.gettz("UTC"),
    CET=dateutil.tz.gettz("Europe/Berlin"),
    CEST=dateutil.tz.gettz("Europe/Berlin"),
    IST=dateutil.tz.gettz("Asia/Kolkata"),
    BST=dateutil.tz.gettz("Europe/London"),
    MSK=dateutil.tz.gettz("Europe/Moscow"),
    EET=dateutil.tz.gettz("Europe/Helsinki"),
    EEST=dateutil.tz.gettz("Europe/Helsinki"),
    # Add more mappings as needed
)


def parse(*args, **kwargs):
    """
    Parse the input using dateutil.parser.parse with friendly tz support.

    >>> parse('2024-07-26 12:59:00 EDT')
    datetime.datetime(...America/New_York...)
    """
    return dateutil.parser.parse(*args, tzinfos=tzinfos, **kwargs)
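
This is the new tempora 5.7.0 feature that the requirements bump below pulls in: `parse` wraps `dateutil.parser.parse` and supplies the `tzinfos` table, so common timezone abbreviations resolve to concrete zones instead of being ignored. A small usage sketch (assumes `parse` is importable from the top-level `tempora` package, as the hunk's context suggests):

```python
import datetime

from tempora import parse

dt = parse('2024-07-26 12:59:00 EDT')          # EDT maps to America/New_York
assert dt.utcoffset() == datetime.timedelta(hours=-4)
```
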
@@ -39,7 +39,7 @@ requests-oauthlib==2.0.0
rumps==0.4.0; platform_system == "Darwin"
simplejson==3.19.2
six==1.16.0
tempora==5.6.0
tempora==5.7.0
tokenize-rt==5.2.0
tzdata==2024.1
tzlocal==5.0.1