Updated more-itertools to 5.0.0

Labrys of Knossos 2022-11-29 01:26:33 -05:00
parent 1aff7eb85d
commit 684cca8c9b
24 changed files with 3339 additions and 3478 deletions

15 binary files changed (contents not shown).

View file

@ -1,6 +1,2 @@
"""More routines for operating on iterables, beyond itertools"""
from .more import * # noqa
from .recipes import * # noqa
__version__ = '9.0.0'
from more_itertools.more import * # noqa
from more_itertools.recipes import * # noqa
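
Both the old and new __init__.py simply re-export everything from more.py and recipes.py, so code importing from the package root is unaffected by the change. A minimal sketch of that public surface (illustrative only, not part of the diff; assumes the vendored copy is importable as more_itertools):

import more_itertools as mi

# take() comes from recipes.py, chunked() from more.py; both are re-exported at the package root.
assert mi.take(3, range(10)) == [0, 1, 2]
assert list(mi.chunked('ABCDE', 2)) == [['A', 'B'], ['C', 'D'], ['E']]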

View file

@ -1,2 +0,0 @@
from .more import *
from .recipes import *

File diff suppressed because it is too large.

View file

@ -1,674 +0,0 @@
"""Stubs for more_itertools.more"""
from typing import (
Any,
Callable,
Container,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
List,
Optional,
Reversible,
Sequence,
Sized,
Tuple,
Union,
TypeVar,
type_check_only,
)
from types import TracebackType
from typing_extensions import ContextManager, Protocol, Type, overload
# Type and type variable definitions
_T = TypeVar('_T')
_T1 = TypeVar('_T1')
_T2 = TypeVar('_T2')
_U = TypeVar('_U')
_V = TypeVar('_V')
_W = TypeVar('_W')
_T_co = TypeVar('_T_co', covariant=True)
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
_Raisable = Union[BaseException, 'Type[BaseException]']
@type_check_only
class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
@type_check_only
class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
def chunked(
iterable: Iterable[_T], n: Optional[int], strict: bool = ...
) -> Iterator[List[_T]]: ...
@overload
def first(iterable: Iterable[_T]) -> _T: ...
@overload
def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
@overload
def last(iterable: Iterable[_T]) -> _T: ...
@overload
def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
@overload
def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
@overload
def nth_or_last(
iterable: Iterable[_T], n: int, default: _U
) -> Union[_T, _U]: ...
class peekable(Generic[_T], Iterator[_T]):
def __init__(self, iterable: Iterable[_T]) -> None: ...
def __iter__(self) -> peekable[_T]: ...
def __bool__(self) -> bool: ...
@overload
def peek(self) -> _T: ...
@overload
def peek(self, default: _U) -> Union[_T, _U]: ...
def prepend(self, *items: _T) -> None: ...
def __next__(self) -> _T: ...
@overload
def __getitem__(self, index: int) -> _T: ...
@overload
def __getitem__(self, index: slice) -> List[_T]: ...
def consumer(func: _GenFn) -> _GenFn: ...
def ilen(iterable: Iterable[object]) -> int: ...
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
def with_iter(
context_manager: ContextManager[Iterable[_T]],
) -> Iterator[_T]: ...
def one(
iterable: Iterable[_T],
too_short: Optional[_Raisable] = ...,
too_long: Optional[_Raisable] = ...,
) -> _T: ...
def raise_(exception: _Raisable, *args: Any) -> None: ...
def strictly_n(
iterable: Iterable[_T],
n: int,
too_short: Optional[_GenFn] = ...,
too_long: Optional[_GenFn] = ...,
) -> List[_T]: ...
def distinct_permutations(
iterable: Iterable[_T], r: Optional[int] = ...
) -> Iterator[Tuple[_T, ...]]: ...
def intersperse(
e: _U, iterable: Iterable[_T], n: int = ...
) -> Iterator[Union[_T, _U]]: ...
def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ...
@overload
def windowed(
seq: Iterable[_T], n: int, *, step: int = ...
) -> Iterator[Tuple[Optional[_T], ...]]: ...
@overload
def windowed(
seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
def substrings_indexes(
seq: Sequence[_T], reverse: bool = ...
) -> Iterator[Tuple[Sequence[_T], int, int]]: ...
class bucket(Generic[_T, _U], Container[_U]):
def __init__(
self,
iterable: Iterable[_T],
key: Callable[[_T], _U],
validator: Optional[Callable[[object], object]] = ...,
) -> None: ...
def __contains__(self, value: object) -> bool: ...
def __iter__(self) -> Iterator[_U]: ...
def __getitem__(self, value: object) -> Iterator[_T]: ...
def spy(
iterable: Iterable[_T], n: int = ...
) -> Tuple[List[_T], Iterator[_T]]: ...
def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
def interleave_evenly(
iterables: List[Iterable[_T]], lengths: Optional[List[int]] = ...
) -> Iterator[_T]: ...
def collapse(
iterable: Iterable[Any],
base_type: Optional[type] = ...,
levels: Optional[int] = ...,
) -> Iterator[Any]: ...
@overload
def side_effect(
func: Callable[[_T], object],
iterable: Iterable[_T],
chunk_size: None = ...,
before: Optional[Callable[[], object]] = ...,
after: Optional[Callable[[], object]] = ...,
) -> Iterator[_T]: ...
@overload
def side_effect(
func: Callable[[List[_T]], object],
iterable: Iterable[_T],
chunk_size: int,
before: Optional[Callable[[], object]] = ...,
after: Optional[Callable[[], object]] = ...,
) -> Iterator[_T]: ...
def sliced(
seq: Sequence[_T], n: int, strict: bool = ...
) -> Iterator[Sequence[_T]]: ...
def split_at(
iterable: Iterable[_T],
pred: Callable[[_T], object],
maxsplit: int = ...,
keep_separator: bool = ...,
) -> Iterator[List[_T]]: ...
def split_before(
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
) -> Iterator[List[_T]]: ...
def split_after(
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
) -> Iterator[List[_T]]: ...
def split_when(
iterable: Iterable[_T],
pred: Callable[[_T, _T], object],
maxsplit: int = ...,
) -> Iterator[List[_T]]: ...
def split_into(
iterable: Iterable[_T], sizes: Iterable[Optional[int]]
) -> Iterator[List[_T]]: ...
@overload
def padded(
iterable: Iterable[_T],
*,
n: Optional[int] = ...,
next_multiple: bool = ...,
) -> Iterator[Optional[_T]]: ...
@overload
def padded(
iterable: Iterable[_T],
fillvalue: _U,
n: Optional[int] = ...,
next_multiple: bool = ...,
) -> Iterator[Union[_T, _U]]: ...
@overload
def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
@overload
def repeat_last(
iterable: Iterable[_T], default: _U
) -> Iterator[Union[_T, _U]]: ...
def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
@overload
def stagger(
iterable: Iterable[_T],
offsets: _SizedIterable[int] = ...,
longest: bool = ...,
) -> Iterator[Tuple[Optional[_T], ...]]: ...
@overload
def stagger(
iterable: Iterable[_T],
offsets: _SizedIterable[int] = ...,
longest: bool = ...,
fillvalue: _U = ...,
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
class UnequalIterablesError(ValueError):
def __init__(
self, details: Optional[Tuple[int, int, int]] = ...
) -> None: ...
@overload
def zip_equal(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
@overload
def zip_equal(
__iter1: Iterable[_T1], __iter2: Iterable[_T2]
) -> Iterator[Tuple[_T1, _T2]]: ...
@overload
def zip_equal(
__iter1: Iterable[_T],
__iter2: Iterable[_T],
__iter3: Iterable[_T],
*iterables: Iterable[_T],
) -> Iterator[Tuple[_T, ...]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T1],
*,
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: None = None,
) -> Iterator[Tuple[Optional[_T1]]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T1],
__iter2: Iterable[_T2],
*,
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: None = None,
) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T],
__iter2: Iterable[_T],
__iter3: Iterable[_T],
*iterables: Iterable[_T],
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: None = None,
) -> Iterator[Tuple[Optional[_T], ...]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T1],
*,
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: _U,
) -> Iterator[Tuple[Union[_T1, _U]]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T1],
__iter2: Iterable[_T2],
*,
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: _U,
) -> Iterator[Tuple[Union[_T1, _U], Union[_T2, _U]]]: ...
@overload
def zip_offset(
__iter1: Iterable[_T],
__iter2: Iterable[_T],
__iter3: Iterable[_T],
*iterables: Iterable[_T],
offsets: _SizedIterable[int],
longest: bool = ...,
fillvalue: _U,
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
def sort_together(
iterables: Iterable[Iterable[_T]],
key_list: Iterable[int] = ...,
key: Optional[Callable[..., Any]] = ...,
reverse: bool = ...,
) -> List[Tuple[_T, ...]]: ...
def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ...
def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
def always_iterable(
obj: object,
base_type: Union[
type, Tuple[Union[type, Tuple[Any, ...]], ...], None
] = ...,
) -> Iterator[Any]: ...
def adjacent(
predicate: Callable[[_T], bool],
iterable: Iterable[_T],
distance: int = ...,
) -> Iterator[Tuple[bool, _T]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: None = None,
valuefunc: None = None,
reducefunc: None = None,
) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: None,
reducefunc: None,
) -> Iterator[Tuple[_U, Iterator[_T]]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: None,
valuefunc: Callable[[_T], _V],
reducefunc: None,
) -> Iterable[Tuple[_T, Iterable[_V]]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: Callable[[_T], _V],
reducefunc: None,
) -> Iterable[Tuple[_U, Iterator[_V]]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: None,
valuefunc: None,
reducefunc: Callable[[Iterator[_T]], _W],
) -> Iterable[Tuple[_T, _W]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: None,
reducefunc: Callable[[Iterator[_T]], _W],
) -> Iterable[Tuple[_U, _W]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: None,
valuefunc: Callable[[_T], _V],
reducefunc: Callable[[Iterable[_V]], _W],
) -> Iterable[Tuple[_T, _W]]: ...
@overload
def groupby_transform(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: Callable[[_T], _V],
reducefunc: Callable[[Iterable[_V]], _W],
) -> Iterable[Tuple[_U, _W]]: ...
class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
@overload
def __init__(self, __stop: _T) -> None: ...
@overload
def __init__(self, __start: _T, __stop: _T) -> None: ...
@overload
def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
def __bool__(self) -> bool: ...
def __contains__(self, elem: object) -> bool: ...
def __eq__(self, other: object) -> bool: ...
@overload
def __getitem__(self, key: int) -> _T: ...
@overload
def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
def __hash__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __len__(self) -> int: ...
def __reduce__(
self,
) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ...
def __repr__(self) -> str: ...
def __reversed__(self) -> Iterator[_T]: ...
def count(self, value: _T) -> int: ...
def index(self, value: _T) -> int: ... # type: ignore
def count_cycle(
iterable: Iterable[_T], n: Optional[int] = ...
) -> Iterable[Tuple[int, _T]]: ...
def mark_ends(
iterable: Iterable[_T],
) -> Iterable[Tuple[bool, bool, _T]]: ...
def locate(
iterable: Iterable[object],
pred: Callable[..., Any] = ...,
window_size: Optional[int] = ...,
) -> Iterator[int]: ...
def lstrip(
iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
def rstrip(
iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
def strip(
iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
class islice_extended(Generic[_T], Iterator[_T]):
def __init__(
self, iterable: Iterable[_T], *args: Optional[int]
) -> None: ...
def __iter__(self) -> islice_extended[_T]: ...
def __next__(self) -> _T: ...
def __getitem__(self, index: slice) -> islice_extended[_T]: ...
def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
def consecutive_groups(
iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
) -> Iterator[Iterator[_T]]: ...
@overload
def difference(
iterable: Iterable[_T],
func: Callable[[_T, _T], _U] = ...,
*,
initial: None = ...,
) -> Iterator[Union[_T, _U]]: ...
@overload
def difference(
iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
) -> Iterator[_U]: ...
class SequenceView(Generic[_T], Sequence[_T]):
def __init__(self, target: Sequence[_T]) -> None: ...
@overload
def __getitem__(self, index: int) -> _T: ...
@overload
def __getitem__(self, index: slice) -> Sequence[_T]: ...
def __len__(self) -> int: ...
class seekable(Generic[_T], Iterator[_T]):
def __init__(
self, iterable: Iterable[_T], maxlen: Optional[int] = ...
) -> None: ...
def __iter__(self) -> seekable[_T]: ...
def __next__(self) -> _T: ...
def __bool__(self) -> bool: ...
@overload
def peek(self) -> _T: ...
@overload
def peek(self, default: _U) -> Union[_T, _U]: ...
def elements(self) -> SequenceView[_T]: ...
def seek(self, index: int) -> None: ...
class run_length:
@staticmethod
def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ...
@staticmethod
def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ...
def exactly_n(
iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
) -> bool: ...
def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ...
def make_decorator(
wrapping_func: Callable[..., _U], result_index: int = ...
) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
@overload
def map_reduce(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: None = ...,
reducefunc: None = ...,
) -> Dict[_U, List[_T]]: ...
@overload
def map_reduce(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: Callable[[_T], _V],
reducefunc: None = ...,
) -> Dict[_U, List[_V]]: ...
@overload
def map_reduce(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: None = ...,
reducefunc: Callable[[List[_T]], _W] = ...,
) -> Dict[_U, _W]: ...
@overload
def map_reduce(
iterable: Iterable[_T],
keyfunc: Callable[[_T], _U],
valuefunc: Callable[[_T], _V],
reducefunc: Callable[[List[_V]], _W],
) -> Dict[_U, _W]: ...
def rlocate(
iterable: Iterable[_T],
pred: Callable[..., object] = ...,
window_size: Optional[int] = ...,
) -> Iterator[int]: ...
def replace(
iterable: Iterable[_T],
pred: Callable[..., object],
substitutes: Iterable[_U],
count: Optional[int] = ...,
window_size: int = ...,
) -> Iterator[Union[_T, _U]]: ...
def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ...
def set_partitions(
iterable: Iterable[_T], k: Optional[int] = ...
) -> Iterator[List[List[_T]]]: ...
class time_limited(Generic[_T], Iterator[_T]):
def __init__(
self, limit_seconds: float, iterable: Iterable[_T]
) -> None: ...
def __iter__(self) -> islice_extended[_T]: ...
def __next__(self) -> _T: ...
@overload
def only(
iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ...
) -> Optional[_T]: ...
@overload
def only(
iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ...
) -> Union[_T, _U]: ...
def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
def distinct_combinations(
iterable: Iterable[_T], r: int
) -> Iterator[Tuple[_T, ...]]: ...
def filter_except(
validator: Callable[[Any], object],
iterable: Iterable[_T],
*exceptions: Type[BaseException],
) -> Iterator[_T]: ...
def map_except(
function: Callable[[Any], _U],
iterable: Iterable[_T],
*exceptions: Type[BaseException],
) -> Iterator[_U]: ...
def map_if(
iterable: Iterable[Any],
pred: Callable[[Any], bool],
func: Callable[[Any], Any],
func_else: Optional[Callable[[Any], Any]] = ...,
) -> Iterator[Any]: ...
def sample(
iterable: Iterable[_T],
k: int,
weights: Optional[Iterable[float]] = ...,
) -> List[_T]: ...
def is_sorted(
iterable: Iterable[_T],
key: Optional[Callable[[_T], _U]] = ...,
reverse: bool = False,
strict: bool = False,
) -> bool: ...
class AbortThread(BaseException):
pass
class callback_iter(Generic[_T], Iterator[_T]):
def __init__(
self,
func: Callable[..., Any],
callback_kwd: str = ...,
wait_seconds: float = ...,
) -> None: ...
def __enter__(self) -> callback_iter[_T]: ...
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> Optional[bool]: ...
def __iter__(self) -> callback_iter[_T]: ...
def __next__(self) -> _T: ...
def _reader(self) -> Iterator[_T]: ...
@property
def done(self) -> bool: ...
@property
def result(self) -> Any: ...
def windowed_complete(
iterable: Iterable[_T], n: int
) -> Iterator[Tuple[_T, ...]]: ...
def all_unique(
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> bool: ...
def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ...
def nth_permutation(
iterable: Iterable[_T], r: int, index: int
) -> Tuple[_T, ...]: ...
def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ...
def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
def combination_index(
element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
def permutation_index(
element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
class countable(Generic[_T], Iterator[_T]):
def __init__(self, iterable: Iterable[_T]) -> None: ...
def __iter__(self) -> countable[_T]: ...
def __next__(self) -> _T: ...
def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[List[_T]]: ...
def zip_broadcast(
*objects: Union[_T, Iterable[_T]],
scalar_types: Union[
type, Tuple[Union[type, Tuple[Any, ...]], ...], None
] = ...,
strict: bool = ...,
) -> Iterable[Tuple[_T, ...]]: ...
def unique_in_window(
iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def duplicates_everseen(
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def duplicates_justseen(
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
class _SupportsLessThan(Protocol):
def __lt__(self, __other: Any) -> bool: ...
_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
@overload
def minmax(
iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
@overload
def minmax(
iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
) -> Tuple[_T, _T]: ...
@overload
def minmax(
iterable_or_value: Iterable[_SupportsLessThanT],
*,
key: None = None,
default: _U,
) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ...
@overload
def minmax(
iterable_or_value: Iterable[_T],
*,
key: Callable[[_T], _SupportsLessThan],
default: _U,
) -> Union[_U, Tuple[_T, _T]]: ...
@overload
def minmax(
iterable_or_value: _SupportsLessThanT,
__other: _SupportsLessThanT,
*others: _SupportsLessThanT,
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
@overload
def minmax(
iterable_or_value: _T,
__other: _T,
*others: _T,
key: Callable[[_T], _SupportsLessThan],
) -> Tuple[_T, _T]: ...
def longest_common_prefix(
iterables: Iterable[Iterable[_T]],
) -> Iterator[_T]: ...
def iequals(*iterables: Iterable[object]) -> bool: ...
def constrained_batches(
iterable: Iterable[object],
max_size: int,
max_count: Optional[int] = ...,
get_len: Callable[[_T], object] = ...,
strict: bool = ...,
) -> Iterator[Tuple[_T]]: ...
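
The removed stub file mostly encodes runtime behavior as overloads: for example, first() and minmax() only return the (possibly differently typed) default when one is supplied. A rough runtime illustration of those signatures (illustrative only, not part of the diff; assumes the 9.0.0 code these stubs describe):

import more_itertools as mi

assert mi.first([7, 8, 9]) == 7            # first(iterable) -> _T
assert mi.first([], 'empty') == 'empty'    # first(iterable, default) -> _T | _U
assert mi.minmax([3, 1, 2]) == (1, 3)      # single-iterable overload
assert mi.minmax(3, 1, 2) == (1, 3)        # varargs overload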

View file

@ -7,33 +7,20 @@ Some backward-compatible usability improvements have been made.
.. [1] http://docs.python.org/library/itertools.html#recipes
"""
import math
import operator
from collections import deque
from collections.abc import Sized
from functools import reduce
from itertools import (
chain,
combinations,
compress,
count,
cycle,
groupby,
islice,
repeat,
starmap,
tee,
zip_longest,
chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee
)
import operator
from random import randrange, sample, choice
from six import PY2
from six.moves import filter, filterfalse, map, range, zip, zip_longest
__all__ = [
'accumulate',
'all_equal',
'batched',
'before_and_after',
'consume',
'convolve',
'dotproduct',
'first_true',
'flatten',
@ -43,10 +30,8 @@ __all__ = [
'nth',
'nth_combination',
'padnone',
'pad_none',
'pairwise',
'partition',
'polynomial_from_roots',
'powerset',
'prepend',
'quantify',
@ -56,18 +41,42 @@ __all__ = [
'random_product',
'repeatfunc',
'roundrobin',
'sieve',
'sliding_window',
'subslices',
'tabulate',
'tail',
'take',
'triplewise',
'unique_everseen',
'unique_justseen',
]
_marker = object()
def accumulate(iterable, func=operator.add):
"""
Return an iterator whose items are the accumulated results of a function
(specified by the optional *func* argument) that takes two arguments.
By default, returns accumulated sums with :func:`operator.add`.
>>> list(accumulate([1, 2, 3, 4, 5])) # Running sum
[1, 3, 6, 10, 15]
>>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product
[1, 2, 6]
>>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum
[0, 1, 1, 2, 3, 3]
This function is available in the ``itertools`` module for Python 3.2 and
greater.
"""
it = iter(iterable)
try:
total = next(it)
except StopIteration:
return
else:
yield total
for element in it:
total = func(total, element)
yield total
def take(n, iterable):
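
As the accumulate() docstring above notes, the same behavior is available from itertools.accumulate on Python 3.2 and later; a quick equivalence check (illustrative only, not part of the diff):

import operator
from itertools import accumulate

assert list(accumulate([1, 2, 3, 4, 5])) == [1, 3, 6, 10, 15]    # running sum
assert list(accumulate([1, 2, 3], operator.mul)) == [1, 2, 6]    # func passed positionally
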
@ -75,13 +84,12 @@ def take(n, iterable):
>>> take(3, range(10))
[0, 1, 2]
If there are fewer than *n* items in the iterable, all of them are
returned.
>>> take(10, range(3))
>>> take(5, range(3))
[0, 1, 2]
Effectively a short replacement for ``next`` based iterator consumption
when you want more than one item, but less than the whole iterator.
"""
return list(islice(iterable, n))
@ -107,19 +115,12 @@ def tabulate(function, start=0):
def tail(n, iterable):
"""Return an iterator over the last *n* items of *iterable*.
>>> t = tail(3, 'ABCDEFG')
>>> list(t)
['E', 'F', 'G']
>>> t = tail(3, 'ABCDEFG')
>>> list(t)
['E', 'F', 'G']
"""
# If the given iterable has a length, then we can use islice to get its
# final elements. Note that if the iterable is not actually Iterable,
# either islice or deque will throw a TypeError. This is why we don't
# check if it is Iterable.
if isinstance(iterable, Sized):
yield from islice(iterable, max(0, len(iterable) - n), None)
else:
yield from iter(deque(iterable, maxlen=n))
return iter(deque(iterable, maxlen=n))
def consume(iterator, n=None):
@ -165,11 +166,11 @@ def consume(iterator, n=None):
def nth(iterable, n, default=None):
"""Returns the nth item or a default value.
>>> l = range(10)
>>> nth(l, 3)
3
>>> nth(l, 20, "zebra")
'zebra'
>>> l = range(10)
>>> nth(l, 3)
3
>>> nth(l, 20, "zebra")
'zebra'
"""
return next(islice(iterable, n, None), default)
@ -192,17 +193,17 @@ def all_equal(iterable):
def quantify(iterable, pred=bool):
"""Return the how many times the predicate is true.
>>> quantify([True, False, True])
2
>>> quantify([True, False, True])
2
"""
return sum(map(pred, iterable))
def pad_none(iterable):
def padnone(iterable):
"""Returns the sequence of elements and then returns ``None`` indefinitely.
>>> take(5, pad_none(range(3)))
>>> take(5, padnone(range(3)))
[0, 1, 2, None, None]
Useful for emulating the behavior of the built-in :func:`map` function.
@ -213,14 +214,11 @@ def pad_none(iterable):
return chain(iterable, repeat(None))
padnone = pad_none
def ncycles(iterable, n):
"""Returns the sequence elements *n* times
>>> list(ncycles(["a", "b"], 3))
['a', 'b', 'a', 'b', 'a', 'b']
>>> list(ncycles(["a", "b"], 3))
['a', 'b', 'a', 'b', 'a', 'b']
"""
return chain.from_iterable(repeat(tuple(iterable), n))
@ -229,8 +227,8 @@ def ncycles(iterable, n):
def dotproduct(vec1, vec2):
"""Returns the dot product of the two iterables.
>>> dotproduct([10, 10], [20, 20])
400
>>> dotproduct([10, 10], [20, 20])
400
"""
return sum(map(operator.mul, vec1, vec2))
@ -275,109 +273,27 @@ def repeatfunc(func, times=None, *args):
return starmap(func, repeat(args, times))
def _pairwise(iterable):
def pairwise(iterable):
"""Returns an iterator of paired items, overlapping, from the original
>>> take(4, pairwise(count()))
[(0, 1), (1, 2), (2, 3), (3, 4)]
On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
>>> take(4, pairwise(count()))
[(0, 1), (1, 2), (2, 3), (3, 4)]
"""
a, b = tee(iterable)
next(b, None)
yield from zip(a, b)
return zip(a, b)
try:
from itertools import pairwise as itertools_pairwise
except ImportError:
pairwise = _pairwise
else:
def grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks.
def pairwise(iterable):
yield from itertools_pairwise(iterable)
pairwise.__doc__ = _pairwise.__doc__
class UnequalIterablesError(ValueError):
def __init__(self, details=None):
msg = 'Iterables have different lengths'
if details is not None:
msg += (': index 0 has length {}; index {} has length {}').format(
*details
)
super().__init__(msg)
def _zip_equal_generator(iterables):
for combo in zip_longest(*iterables, fillvalue=_marker):
for val in combo:
if val is _marker:
raise UnequalIterablesError()
yield combo
def _zip_equal(*iterables):
# Check whether the iterables are all the same size.
try:
first_size = len(iterables[0])
for i, it in enumerate(iterables[1:], 1):
size = len(it)
if size != first_size:
break
else:
# If we didn't break out, we can use the built-in zip.
return zip(*iterables)
# If we did break out, there was a mismatch.
raise UnequalIterablesError(details=(first_size, i, size))
# If any one of the iterables didn't have a length, start reading
# them until one runs out.
except TypeError:
return _zip_equal_generator(iterables)
def grouper(iterable, n, incomplete='fill', fillvalue=None):
"""Group elements from *iterable* into fixed-length groups of length *n*.
>>> list(grouper('ABCDEF', 3))
[('A', 'B', 'C'), ('D', 'E', 'F')]
The keyword arguments *incomplete* and *fillvalue* control what happens for
iterables whose length is not a multiple of *n*.
When *incomplete* is `'fill'`, the last group will contain instances of
*fillvalue*.
>>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x'))
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
When *incomplete* is `'ignore'`, the last group will not be emitted.
>>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x'))
[('A', 'B', 'C'), ('D', 'E', 'F')]
When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised.
>>> it = grouper('ABCDEFG', 3, incomplete='strict')
>>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
UnequalIterablesError
>>> list(grouper(3, 'ABCDEFG', 'x'))
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
"""
args = [iter(iterable)] * n
if incomplete == 'fill':
return zip_longest(*args, fillvalue=fillvalue)
if incomplete == 'strict':
return _zip_equal(*args)
if incomplete == 'ignore':
return zip(*args)
else:
raise ValueError('Expected fill, strict, or ignore')
return zip_longest(fillvalue=fillvalue, *args)
def roundrobin(*iterables):
@ -393,7 +309,10 @@ def roundrobin(*iterables):
"""
# Recipe credited to George Sakkis
pending = len(iterables)
nexts = cycle(iter(it).__next__ for it in iterables)
if PY2:
nexts = cycle(iter(it).next for it in iterables)
else:
nexts = cycle(iter(it).__next__ for it in iterables)
while pending:
try:
for next in nexts:
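
For reference, roundrobin() takes one item from each input per pass and drops exhausted inputs from the rotation until everything is consumed (illustrative only, not part of the diff; assumes the vendored module imports as more_itertools):

import more_itertools as mi

assert list(mi.roundrobin('ABC', 'D', 'EF')) == ['A', 'D', 'E', 'B', 'F', 'C']
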
@ -415,23 +334,10 @@ def partition(pred, iterable):
>>> list(even_items), list(odd_items)
([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
If *pred* is None, :func:`bool` is used.
>>> iterable = [0, 1, False, True, '', ' ']
>>> false_items, true_items = partition(None, iterable)
>>> list(false_items), list(true_items)
([0, False, ''], [1, True, ' '])
"""
if pred is None:
pred = bool
evaluations = ((pred(x), x) for x in iterable)
t1, t2 = tee(evaluations)
return (
(x for (cond, x) in t1 if not cond),
(x for (cond, x) in t2 if cond),
)
# partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
t1, t2 = tee(iterable)
return filterfalse(pred, t1), filter(pred, t2)
def powerset(iterable):
@ -469,46 +375,41 @@ def unique_everseen(iterable, key=None):
Sequences with a mix of hashable and unhashable items can be used.
The function will be slower (i.e., `O(n^2)`) for unhashable items.
Remember that ``list`` objects are unhashable - you can use the *key*
parameter to transform the list to a tuple (which is hashable) to
avoid a slowdown.
>>> iterable = ([1, 2], [2, 3], [1, 2])
>>> list(unique_everseen(iterable)) # Slow
[[1, 2], [2, 3]]
>>> list(unique_everseen(iterable, key=tuple)) # Faster
[[1, 2], [2, 3]]
Similarly, you may want to convert unhashable ``set`` objects with
``key=frozenset``. For ``dict`` objects,
``key=lambda x: frozenset(x.items())`` can be used.
"""
seenset = set()
seenset_add = seenset.add
seenlist = []
seenlist_add = seenlist.append
use_key = key is not None
for element in iterable:
k = key(element) if use_key else element
try:
if k not in seenset:
seenset_add(k)
yield element
except TypeError:
if k not in seenlist:
seenlist_add(k)
yield element
if key is None:
for element in iterable:
try:
if element not in seenset:
seenset_add(element)
yield element
except TypeError:
if element not in seenlist:
seenlist_add(element)
yield element
else:
for element in iterable:
k = key(element)
try:
if k not in seenset:
seenset_add(k)
yield element
except TypeError:
if k not in seenlist:
seenlist_add(k)
yield element
def unique_justseen(iterable, key=None):
"""Yields elements in order, ignoring serial duplicates
>>> list(unique_justseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D', 'A', 'B']
>>> list(unique_justseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'A', 'D']
>>> list(unique_justseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D', 'A', 'B']
>>> list(unique_justseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'A', 'D']
"""
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
@ -525,16 +426,6 @@ def iter_except(func, exception, first=None):
>>> list(iter_except(l.pop, IndexError))
[2, 1, 0]
Multiple exceptions can be specified as a stopping condition:
>>> l = [1, 2, 3, '...', 4, 5, 6]
>>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
[7, 6, 5]
>>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
[4, 3, 2]
>>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
[]
"""
try:
if first is not None:
@ -565,7 +456,7 @@ def first_true(iterable, default=None, pred=None):
return next(filter(pred, iterable), default)
def random_product(*args, repeat=1):
def random_product(*args, **kwds):
"""Draw an item at random from each of the input iterables.
>>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
@ -581,7 +472,7 @@ def random_product(*args, repeat=1):
``itertools.product(*args, **kwarg)``.
"""
pools = [tuple(pool) for pool in args] * repeat
pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
return tuple(choice(pool) for pool in pools)
@ -644,12 +535,6 @@ def nth_combination(iterable, r, index):
sort position *index* directly, without computing the previous
subsequences.
>>> nth_combination(range(5), 3, 5)
(0, 3, 4)
``ValueError`` will be raised if *r* is negative or greater than the length
of *iterable*.
``IndexError`` will be raised if the given *index* is invalid.
"""
pool = tuple(iterable)
n = len(pool)
@ -686,156 +571,7 @@ def prepend(value, iterator):
>>> list(prepend(value, iterator))
['0', '1', '2', '3']
To prepend multiple values, see :func:`itertools.chain`
or :func:`value_chain`.
To prepend multiple values, see :func:`itertools.chain`.
"""
return chain([value], iterator)
def convolve(signal, kernel):
"""Convolve the iterable *signal* with the iterable *kernel*.
>>> signal = (1, 2, 3, 4, 5)
>>> kernel = [3, 2, 1]
>>> list(convolve(signal, kernel))
[3, 8, 14, 20, 26, 14, 5]
Note: the input arguments are not interchangeable, as the *kernel*
is immediately consumed and stored.
"""
kernel = tuple(kernel)[::-1]
n = len(kernel)
window = deque([0], maxlen=n) * n
for x in chain(signal, repeat(0, n - 1)):
window.append(x)
yield sum(map(operator.mul, kernel, window))
def before_and_after(predicate, it):
"""A variant of :func:`takewhile` that allows complete access to the
remainder of the iterator.
>>> it = iter('ABCdEfGhI')
>>> all_upper, remainder = before_and_after(str.isupper, it)
>>> ''.join(all_upper)
'ABC'
>>> ''.join(remainder) # takewhile() would lose the 'd'
'dEfGhI'
Note that the first iterator must be fully consumed before the second
iterator can generate valid results.
"""
it = iter(it)
transition = []
def true_iterator():
for elem in it:
if predicate(elem):
yield elem
else:
transition.append(elem)
return
# Note: this is different from itertools recipes to allow nesting
# before_and_after remainders into before_and_after again. See tests
# for an example.
remainder_iterator = chain(transition, it)
return true_iterator(), remainder_iterator
def triplewise(iterable):
"""Return overlapping triplets from *iterable*.
>>> list(triplewise('ABCDE'))
[('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
"""
for (a, _), (b, c) in pairwise(pairwise(iterable)):
yield a, b, c
def sliding_window(iterable, n):
"""Return a sliding window of width *n* over *iterable*.
>>> list(sliding_window(range(6), 4))
[(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]
If *iterable* has fewer than *n* items, then nothing is yielded:
>>> list(sliding_window(range(3), 4))
[]
For a variant with more features, see :func:`windowed`.
"""
it = iter(iterable)
window = deque(islice(it, n), maxlen=n)
if len(window) == n:
yield tuple(window)
for x in it:
window.append(x)
yield tuple(window)
def subslices(iterable):
"""Return all contiguous non-empty subslices of *iterable*.
>>> list(subslices('ABC'))
[['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']]
This is similar to :func:`substrings`, but emits items in a different
order.
"""
seq = list(iterable)
slices = starmap(slice, combinations(range(len(seq) + 1), 2))
return map(operator.getitem, repeat(seq), slices)
def polynomial_from_roots(roots):
"""Compute a polynomial's coefficients from its roots.
>>> roots = [5, -4, 3] # (x - 5) * (x + 4) * (x - 3)
>>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60
[1, -4, -17, 60]
"""
# Use math.prod for Python 3.8+,
prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1))
roots = list(map(operator.neg, roots))
return [
sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1)
]
def sieve(n):
"""Yield the primes less than n.
>>> list(sieve(30))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
"""
isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
limit = isqrt(n) + 1
data = bytearray([1]) * n
data[:2] = 0, 0
for p in compress(range(limit), data):
data[p + p : n : p] = bytearray(len(range(p + p, n, p)))
return compress(count(), data)
def batched(iterable, n):
"""Batch data into lists of length *n*. The last batch may be shorter.
>>> list(batched('ABCDEFG', 3))
[['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
This recipe is from the ``itertools`` docs. This library also provides
:func:`chunked`, which has a different implementation.
"""
it = iter(iterable)
while True:
batch = list(islice(it, n))
if not batch:
break
yield batch
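
The convolve() recipe removed above stores the kernel up front but only streams the signal, so the signal may be unbounded; a small sketch against that 9.0.0 recipe (illustrative only, not part of the diff):

from itertools import count, islice
import more_itertools as mi

# Kernel [1, 1] sums each item with its predecessor (zero-padded at the edges);
# count() is infinite, so only the first five outputs are materialized.
assert list(islice(mi.convolve(count(), [1, 1]), 5)) == [0, 1, 3, 5, 7]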

View file

@ -1,110 +0,0 @@
"""Stubs for more_itertools.recipes"""
from typing import (
Any,
Callable,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
from typing_extensions import overload, Type
# Type and type variable definitions
_T = TypeVar('_T')
_U = TypeVar('_U')
def take(n: int, iterable: Iterable[_T]) -> List[_T]: ...
def tabulate(
function: Callable[[int], _T], start: int = ...
) -> Iterator[_T]: ...
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ...
@overload
def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ...
@overload
def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ...
def all_equal(iterable: Iterable[object]) -> bool: ...
def quantify(
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
) -> int: ...
def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
def repeatfunc(
func: Callable[..., _U], times: Optional[int] = ..., *args: Any
) -> Iterator[_U]: ...
def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ...
def grouper(
iterable: Iterable[_T],
n: int,
incomplete: str = ...,
fillvalue: _U = ...,
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
def partition(
pred: Optional[Callable[[_T], object]], iterable: Iterable[_T]
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
def unique_everseen(
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def unique_justseen(
iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ...
) -> Iterator[_T]: ...
@overload
def iter_except(
func: Callable[[], _T],
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
first: None = ...,
) -> Iterator[_T]: ...
@overload
def iter_except(
func: Callable[[], _T],
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
first: Callable[[], _U],
) -> Iterator[Union[_T, _U]]: ...
@overload
def first_true(
iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ...
) -> Optional[_T]: ...
@overload
def first_true(
iterable: Iterable[_T],
default: _U,
pred: Optional[Callable[[_T], object]] = ...,
) -> Union[_T, _U]: ...
def random_product(
*args: Iterable[_T], repeat: int = ...
) -> Tuple[_T, ...]: ...
def random_permutation(
iterable: Iterable[_T], r: Optional[int] = ...
) -> Tuple[_T, ...]: ...
def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ...
def random_combination_with_replacement(
iterable: Iterable[_T], r: int
) -> Tuple[_T, ...]: ...
def nth_combination(
iterable: Iterable[_T], r: int, index: int
) -> Tuple[_T, ...]: ...
def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ...
def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
def before_and_after(
predicate: Callable[[_T], bool], it: Iterable[_T]
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ...
def sliding_window(
iterable: Iterable[_T], n: int
) -> Iterator[Tuple[_T, ...]]: ...
def subslices(iterable: Iterable[_T]) -> Iterator[List[_T]]: ...
def polynomial_from_roots(roots: Sequence[int]) -> List[int]: ...
def sieve(n: int) -> Iterator[int]: ...
def batched(
iterable: Iterable[_T],
n: int,
) -> Iterator[List[_T]]: ...
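
Note that the removed 9.0.0 stub declares grouper(iterable, n, incomplete, fillvalue), while the 5.0.0 code and tests added by this commit call grouper(n, iterable, fillvalue); callers crossing versions have to swap the first two arguments. A sketch of the two call styles (illustrative only; the 5.0.0 form follows GrouperTests below, the 9.0.0 form follows the stub above):

import more_itertools as mi

# 5.0.0, as vendored by this commit:
assert list(mi.grouper(3, 'ABCDE', 'x')) == [('A', 'B', 'C'), ('D', 'E', 'x')]

# 9.0.0, per the removed stub (not valid against the 5.0.0 signature):
# list(mi.grouper('ABCDE', 3, fillvalue='x'))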

File diff suppressed because it is too large.

View file

@ -0,0 +1,616 @@
from doctest import DocTestSuite
from unittest import TestCase
from itertools import combinations
from six.moves import range
import more_itertools as mi
def load_tests(loader, tests, ignore):
# Add the doctests
tests.addTests(DocTestSuite('more_itertools.recipes'))
return tests
class AccumulateTests(TestCase):
"""Tests for ``accumulate()``"""
def test_empty(self):
"""Test that an empty input returns an empty output"""
self.assertEqual(list(mi.accumulate([])), [])
def test_default(self):
"""Test accumulate with the default function (addition)"""
self.assertEqual(list(mi.accumulate([1, 2, 3])), [1, 3, 6])
def test_bogus_function(self):
"""Test accumulate with an invalid function"""
with self.assertRaises(TypeError):
list(mi.accumulate([1, 2, 3], func=lambda x: x))
def test_custom_function(self):
"""Test accumulate with a custom function"""
self.assertEqual(
list(mi.accumulate((1, 2, 3, 2, 1), func=max)), [1, 2, 3, 3, 3]
)
class TakeTests(TestCase):
"""Tests for ``take()``"""
def test_simple_take(self):
"""Test basic usage"""
t = mi.take(5, range(10))
self.assertEqual(t, [0, 1, 2, 3, 4])
def test_null_take(self):
"""Check the null case"""
t = mi.take(0, range(10))
self.assertEqual(t, [])
def test_negative_take(self):
"""Make sure taking negative items results in a ValueError"""
self.assertRaises(ValueError, lambda: mi.take(-3, range(10)))
def test_take_too_much(self):
"""Taking more than an iterator has remaining should return what the
iterator has remaining.
"""
t = mi.take(10, range(5))
self.assertEqual(t, [0, 1, 2, 3, 4])
class TabulateTests(TestCase):
"""Tests for ``tabulate()``"""
def test_simple_tabulate(self):
"""Test the happy path"""
t = mi.tabulate(lambda x: x)
f = tuple([next(t) for _ in range(3)])
self.assertEqual(f, (0, 1, 2))
def test_count(self):
"""Ensure tabulate accepts specific count"""
t = mi.tabulate(lambda x: 2 * x, -1)
f = (next(t), next(t), next(t))
self.assertEqual(f, (-2, 0, 2))
class TailTests(TestCase):
"""Tests for ``tail()``"""
def test_greater(self):
"""Length of iterable is greater than requested tail"""
self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G'])
def test_equal(self):
"""Length of iterable is equal to the requested tail"""
self.assertEqual(
list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
)
def test_less(self):
"""Length of iterable is less than requested tail"""
self.assertEqual(
list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G']
)
class ConsumeTests(TestCase):
"""Tests for ``consume()``"""
def test_sanity(self):
"""Test basic functionality"""
r = (x for x in range(10))
mi.consume(r, 3)
self.assertEqual(3, next(r))
def test_null_consume(self):
"""Check the null case"""
r = (x for x in range(10))
mi.consume(r, 0)
self.assertEqual(0, next(r))
def test_negative_consume(self):
"""Check that negative consumsion throws an error"""
r = (x for x in range(10))
self.assertRaises(ValueError, lambda: mi.consume(r, -1))
def test_total_consume(self):
"""Check that iterator is totally consumed by default"""
r = (x for x in range(10))
mi.consume(r)
self.assertRaises(StopIteration, lambda: next(r))
class NthTests(TestCase):
"""Tests for ``nth()``"""
def test_basic(self):
"""Make sure the nth item is returned"""
l = range(10)
for i, v in enumerate(l):
self.assertEqual(mi.nth(l, i), v)
def test_default(self):
"""Ensure a default value is returned when nth item not found"""
l = range(3)
self.assertEqual(mi.nth(l, 100, "zebra"), "zebra")
def test_negative_item_raises(self):
"""Ensure asking for a negative item raises an exception"""
self.assertRaises(ValueError, lambda: mi.nth(range(10), -3))
class AllEqualTests(TestCase):
"""Tests for ``all_equal()``"""
def test_true(self):
"""Everything is equal"""
self.assertTrue(mi.all_equal('aaaaaa'))
self.assertTrue(mi.all_equal([0, 0, 0, 0]))
def test_false(self):
"""Not everything is equal"""
self.assertFalse(mi.all_equal('aaaaab'))
self.assertFalse(mi.all_equal([0, 0, 0, 1]))
def test_tricky(self):
"""Not everything is identical, but everything is equal"""
items = [1, complex(1, 0), 1.0]
self.assertTrue(mi.all_equal(items))
def test_empty(self):
"""Return True if the iterable is empty"""
self.assertTrue(mi.all_equal(''))
self.assertTrue(mi.all_equal([]))
def test_one(self):
"""Return True if the iterable is singular"""
self.assertTrue(mi.all_equal('0'))
self.assertTrue(mi.all_equal([0]))
class QuantifyTests(TestCase):
"""Tests for ``quantify()``"""
def test_happy_path(self):
"""Make sure True count is returned"""
q = [True, False, True]
self.assertEqual(mi.quantify(q), 2)
def test_custom_predicate(self):
"""Ensure non-default predicates return as expected"""
q = range(10)
self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5)
class PadnoneTests(TestCase):
"""Tests for ``padnone()``"""
def test_happy_path(self):
"""wrapper iterator should return None indefinitely"""
r = range(2)
p = mi.padnone(r)
self.assertEqual([0, 1, None, None], [next(p) for _ in range(4)])
class NcyclesTests(TestCase):
"""Tests for ``nyclces()``"""
def test_happy_path(self):
"""cycle a sequence three times"""
r = ["a", "b", "c"]
n = mi.ncycles(r, 3)
self.assertEqual(
["a", "b", "c", "a", "b", "c", "a", "b", "c"],
list(n)
)
def test_null_case(self):
"""asking for 0 cycles should return an empty iterator"""
n = mi.ncycles(range(100), 0)
self.assertRaises(StopIteration, lambda: next(n))
def test_pathalogical_case(self):
"""asking for negative cycles should return an empty iterator"""
n = mi.ncycles(range(100), -10)
self.assertRaises(StopIteration, lambda: next(n))
class DotproductTests(TestCase):
"""Tests for ``dotproduct()``'"""
def test_happy_path(self):
"""simple dotproduct example"""
self.assertEqual(400, mi.dotproduct([10, 10], [20, 20]))
class FlattenTests(TestCase):
"""Tests for ``flatten()``"""
def test_basic_usage(self):
"""ensure list of lists is flattened one level"""
f = [[0, 1, 2], [3, 4, 5]]
self.assertEqual(list(range(6)), list(mi.flatten(f)))
def test_single_level(self):
"""ensure list of lists is flattened only one level"""
f = [[0, [1, 2]], [[3, 4], 5]]
self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f)))
class RepeatfuncTests(TestCase):
"""Tests for ``repeatfunc()``"""
def test_simple_repeat(self):
"""test simple repeated functions"""
r = mi.repeatfunc(lambda: 5)
self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)])
def test_finite_repeat(self):
"""ensure limited repeat when times is provided"""
r = mi.repeatfunc(lambda: 5, times=5)
self.assertEqual([5, 5, 5, 5, 5], list(r))
def test_added_arguments(self):
"""ensure arguments are applied to the function"""
r = mi.repeatfunc(lambda x: x, 2, 3)
self.assertEqual([3, 3], list(r))
def test_null_times(self):
"""repeat 0 should return an empty iterator"""
r = mi.repeatfunc(range, 0, 3)
self.assertRaises(StopIteration, lambda: next(r))
class PairwiseTests(TestCase):
"""Tests for ``pairwise()``"""
def test_base_case(self):
"""ensure an iterable will return pairwise"""
p = mi.pairwise([1, 2, 3])
self.assertEqual([(1, 2), (2, 3)], list(p))
def test_short_case(self):
"""ensure an empty iterator if there's not enough values to pair"""
p = mi.pairwise("a")
self.assertRaises(StopIteration, lambda: next(p))
class GrouperTests(TestCase):
"""Tests for ``grouper()``"""
def test_even(self):
"""Test when group size divides evenly into the length of
the iterable.
"""
self.assertEqual(
list(mi.grouper(3, 'ABCDEF')), [('A', 'B', 'C'), ('D', 'E', 'F')]
)
def test_odd(self):
"""Test when group size does not divide evenly into the length of the
iterable.
"""
self.assertEqual(
list(mi.grouper(3, 'ABCDE')), [('A', 'B', 'C'), ('D', 'E', None)]
)
def test_fill_value(self):
"""Test that the fill value is used to pad the final group"""
self.assertEqual(
list(mi.grouper(3, 'ABCDE', 'x')),
[('A', 'B', 'C'), ('D', 'E', 'x')]
)
class RoundrobinTests(TestCase):
"""Tests for ``roundrobin()``"""
def test_even_groups(self):
"""Ensure ordered output from evenly populated iterables"""
self.assertEqual(
list(mi.roundrobin('ABC', [1, 2, 3], range(3))),
['A', 1, 0, 'B', 2, 1, 'C', 3, 2]
)
def test_uneven_groups(self):
"""Ensure ordered output from unevenly populated iterables"""
self.assertEqual(
list(mi.roundrobin('ABCD', [1, 2], range(0))),
['A', 1, 'B', 2, 'C', 'D']
)
class PartitionTests(TestCase):
"""Tests for ``partition()``"""
def test_bool(self):
"""Test when pred() returns a boolean"""
lesser, greater = mi.partition(lambda x: x > 5, range(10))
self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5])
self.assertEqual(list(greater), [6, 7, 8, 9])
def test_arbitrary(self):
"""Test when pred() returns an integer"""
divisibles, remainders = mi.partition(lambda x: x % 3, range(10))
self.assertEqual(list(divisibles), [0, 3, 6, 9])
self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8])
class PowersetTests(TestCase):
"""Tests for ``powerset()``"""
def test_combinatorics(self):
"""Ensure a proper enumeration"""
p = mi.powerset([1, 2, 3])
self.assertEqual(
list(p),
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
)
class UniqueEverseenTests(TestCase):
"""Tests for ``unique_everseen()``"""
def test_everseen(self):
"""ensure duplicate elements are ignored"""
u = mi.unique_everseen('AAAABBBBCCDAABBB')
self.assertEqual(
['A', 'B', 'C', 'D'],
list(u)
)
def test_custom_key(self):
"""ensure the custom key comparison works"""
u = mi.unique_everseen('aAbACCc', key=str.lower)
self.assertEqual(list('abC'), list(u))
def test_unhashable(self):
"""ensure things work for unhashable items"""
iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
u = mi.unique_everseen(iterable)
self.assertEqual(list(u), ['a', [1, 2, 3]])
def test_unhashable_key(self):
"""ensure things work for unhashable items with a custom key"""
iterable = ['a', [1, 2, 3], [1, 2, 3], 'a']
u = mi.unique_everseen(iterable, key=lambda x: x)
self.assertEqual(list(u), ['a', [1, 2, 3]])
class UniqueJustseenTests(TestCase):
"""Tests for ``unique_justseen()``"""
def test_justseen(self):
"""ensure only last item is remembered"""
u = mi.unique_justseen('AAAABBBCCDABB')
self.assertEqual(list('ABCDAB'), list(u))
def test_custom_key(self):
"""ensure the custom key comparison works"""
u = mi.unique_justseen('AABCcAD', str.lower)
self.assertEqual(list('ABCAD'), list(u))
class IterExceptTests(TestCase):
"""Tests for ``iter_except()``"""
def test_exact_exception(self):
"""ensure the exact specified exception is caught"""
l = [1, 2, 3]
i = mi.iter_except(l.pop, IndexError)
self.assertEqual(list(i), [3, 2, 1])
def test_generic_exception(self):
"""ensure the generic exception can be caught"""
l = [1, 2]
i = mi.iter_except(l.pop, Exception)
self.assertEqual(list(i), [2, 1])
def test_uncaught_exception_is_raised(self):
"""ensure a non-specified exception is raised"""
l = [1, 2, 3]
i = mi.iter_except(l.pop, KeyError)
self.assertRaises(IndexError, lambda: list(i))
def test_first(self):
"""ensure first is run before the function"""
l = [1, 2, 3]
f = lambda: 25
i = mi.iter_except(l.pop, IndexError, f)
self.assertEqual(list(i), [25, 3, 2, 1])
class FirstTrueTests(TestCase):
"""Tests for ``first_true()``"""
def test_something_true(self):
"""Test with no keywords"""
self.assertEqual(mi.first_true(range(10)), 1)
def test_nothing_true(self):
"""Test default return value."""
self.assertIsNone(mi.first_true([0, 0, 0]))
def test_default(self):
"""Test with a default keyword"""
self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!')
def test_pred(self):
"""Test with a custom predicate"""
self.assertEqual(
mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6
)
class RandomProductTests(TestCase):
"""Tests for ``random_product()``
Since random.choice() has different results with the same seed across
python versions 2.x and 3.x, these tests use highly probably events to
create predictable outcomes across platforms.
"""
def test_simple_lists(self):
"""Ensure that one item is chosen from each list in each pair.
Also ensure that each item from each list eventually appears in
the chosen combinations.
Odds are roughly 1 in 7.1 * 10e16 that one item from either list will
not be chosen after 100 samplings of one item from each list. Just to
be safe, better use a known random seed, too.
"""
nums = [1, 2, 3]
lets = ['a', 'b', 'c']
n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)])
n, m = set(n), set(m)
self.assertEqual(n, set(nums))
self.assertEqual(m, set(lets))
self.assertEqual(len(n), len(nums))
self.assertEqual(len(m), len(lets))
def test_list_with_repeat(self):
"""ensure multiple items are chosen, and that they appear to be chosen
from one list then the next, in proper order.
"""
nums = [1, 2, 3]
lets = ['a', 'b', 'c']
r = list(mi.random_product(nums, lets, repeat=100))
self.assertEqual(2 * 100, len(r))
n, m = set(r[::2]), set(r[1::2])
self.assertEqual(n, set(nums))
self.assertEqual(m, set(lets))
self.assertEqual(len(n), len(nums))
self.assertEqual(len(m), len(lets))
class RandomPermutationTests(TestCase):
"""Tests for ``random_permutation()``"""
def test_full_permutation(self):
"""ensure every item from the iterable is returned in a new ordering
15 elements have a 1 in 1.3 * 10e12 chance of appearing in sorted order, so
we fix a seed value just to be sure.
"""
i = range(15)
r = mi.random_permutation(i)
self.assertEqual(set(i), set(r))
if i == r:
raise AssertionError("Values were not permuted")
def test_partial_permutation(self):
"""ensure all returned items are from the iterable, that the returned
permutation is of the desired length, and that all items eventually
get returned.
Sampling 100 permutations of length 5 from a set of 15 leaves a
(2/3)^100 chance that an item will not be chosen. Multiplied by 15
items, there is a 1 in 2.6e16 chance that at least 1 item will not
show up in the resulting output. Using a random seed will fix that.
"""
items = range(15)
item_set = set(items)
all_items = set()
for _ in range(100):
permutation = mi.random_permutation(items, 5)
self.assertEqual(len(permutation), 5)
permutation_set = set(permutation)
self.assertLessEqual(permutation_set, item_set)
all_items |= permutation_set
self.assertEqual(all_items, item_set)
class RandomCombinationTests(TestCase):
"""Tests for ``random_combination()``"""
def test_pseudorandomness(self):
"""ensure different subsets of the iterable get returned over many
samplings of random combinations"""
items = range(15)
all_items = set()
for _ in range(50):
combination = mi.random_combination(items, 5)
all_items |= set(combination)
self.assertEqual(all_items, set(items))
def test_no_replacement(self):
"""ensure that elements are sampled without replacement"""
items = range(15)
for _ in range(50):
combination = mi.random_combination(items, len(items))
self.assertEqual(len(combination), len(set(combination)))
self.assertRaises(
ValueError, lambda: mi.random_combination(items, len(items) + 1)
)
class RandomCombinationWithReplacementTests(TestCase):
"""Tests for ``random_combination_with_replacement()``"""
def test_replacement(self):
"""ensure that elements are sampled with replacement"""
items = range(5)
combo = mi.random_combination_with_replacement(items, len(items) * 2)
self.assertEqual(2 * len(items), len(combo))
if len(set(combo)) == len(combo):
raise AssertionError("Combination contained no duplicates")
def test_pseudorandomness(self):
"""ensure different subsets of the iterable get returned over many
samplings of random combinations"""
items = range(15)
all_items = set()
for _ in range(50):
combination = mi.random_combination_with_replacement(items, 5)
all_items |= set(combination)
self.assertEqual(all_items, set(items))
class NthCombinationTests(TestCase):
def test_basic(self):
iterable = 'abcdefg'
r = 4
for index, expected in enumerate(combinations(iterable, r)):
actual = mi.nth_combination(iterable, r, index)
self.assertEqual(actual, expected)
def test_long(self):
actual = mi.nth_combination(range(180), 4, 2000000)
expected = (2, 12, 35, 126)
self.assertEqual(actual, expected)
def test_invalid_r(self):
for r in (-1, 3):
with self.assertRaises(ValueError):
mi.nth_combination([], r, 0)
def test_invalid_index(self):
with self.assertRaises(IndexError):
mi.nth_combination('abcdefg', 3, -36)
class PrependTests(TestCase):
def test_basic(self):
value = 'a'
iterator = iter('bcdefg')
actual = list(mi.prepend(value, iterator))
expected = list('abcdefg')
self.assertEqual(actual, expected)
def test_multiple(self):
value = 'ab'
iterator = iter('cdefg')
actual = tuple(mi.prepend(value, iterator))
expected = ('ab',) + tuple('cdefg')
self.assertEqual(actual, expected)