Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-07-06 05:01:10 -07:00)

Updated zipp to 2.0.1

This commit is contained in:
parent f05b09f349
commit 1aff7eb85d

27 changed files with 6200 additions and 393 deletions
16 binary files not shown.
6  libs/common/more_itertools/__init__.py  Normal file
@@ -0,0 +1,6 @@
"""More routines for operating on iterables, beyond itertools"""
|
||||||
|
|
||||||
|
from .more import * # noqa
|
||||||
|
from .recipes import * # noqa
|
||||||
|
|
||||||
|
__version__ = '9.0.0'
|
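As a quick orientation for the new vendored package above, a minimal usage sketch (assuming libs/common is on sys.path, which is how nzbToMedia loads its bundled libraries):

# Minimal sketch: exercising the vendored package (assumes libs/common is importable).
import more_itertools

print(more_itertools.__version__)                 # '9.0.0'
print(list(more_itertools.chunked(range(7), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]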
2  libs/common/more_itertools/__init__.pyi  Normal file
@@ -0,0 +1,2 @@
from .more import *
from .recipes import *
4347  libs/common/more_itertools/more.py  Normal file
File diff suppressed because it is too large
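more.py itself is too large to render here, but two of the helpers it implements (their signatures appear in the more.pyi stub below) can be sketched like this:

# Sketch: peekable() wraps an iterator with look-ahead; ilen() counts the remaining items.
from more_itertools import peekable, ilen

p = peekable(iter('abc'))
print(p.peek())  # 'a' (not consumed)
print(next(p))   # 'a'
print(ilen(p))   # 2  ('b' and 'c' remain)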
674  libs/common/more_itertools/more.pyi  Normal file
@@ -0,0 +1,674 @@
"""Stubs for more_itertools.more"""
|
||||||
|
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Container,
|
||||||
|
Dict,
|
||||||
|
Generic,
|
||||||
|
Hashable,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
Reversible,
|
||||||
|
Sequence,
|
||||||
|
Sized,
|
||||||
|
Tuple,
|
||||||
|
Union,
|
||||||
|
TypeVar,
|
||||||
|
type_check_only,
|
||||||
|
)
|
||||||
|
from types import TracebackType
|
||||||
|
from typing_extensions import ContextManager, Protocol, Type, overload
|
||||||
|
|
||||||
|
# Type and type variable definitions
|
||||||
|
_T = TypeVar('_T')
|
||||||
|
_T1 = TypeVar('_T1')
|
||||||
|
_T2 = TypeVar('_T2')
|
||||||
|
_U = TypeVar('_U')
|
||||||
|
_V = TypeVar('_V')
|
||||||
|
_W = TypeVar('_W')
|
||||||
|
_T_co = TypeVar('_T_co', covariant=True)
|
||||||
|
_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
|
||||||
|
_Raisable = Union[BaseException, 'Type[BaseException]']
|
||||||
|
|
||||||
|
@type_check_only
|
||||||
|
class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
|
||||||
|
|
||||||
|
@type_check_only
|
||||||
|
class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
|
||||||
|
|
||||||
|
def chunked(
|
||||||
|
iterable: Iterable[_T], n: Optional[int], strict: bool = ...
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def first(iterable: Iterable[_T]) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
|
||||||
|
@overload
|
||||||
|
def last(iterable: Iterable[_T]) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
|
||||||
|
@overload
|
||||||
|
def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def nth_or_last(
|
||||||
|
iterable: Iterable[_T], n: int, default: _U
|
||||||
|
) -> Union[_T, _U]: ...
|
||||||
|
|
||||||
|
class peekable(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
||||||
|
def __iter__(self) -> peekable[_T]: ...
|
||||||
|
def __bool__(self) -> bool: ...
|
||||||
|
@overload
|
||||||
|
def peek(self) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def peek(self, default: _U) -> Union[_T, _U]: ...
|
||||||
|
def prepend(self, *items: _T) -> None: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, index: int) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, index: slice) -> List[_T]: ...
|
||||||
|
|
||||||
|
def consumer(func: _GenFn) -> _GenFn: ...
|
||||||
|
def ilen(iterable: Iterable[object]) -> int: ...
|
||||||
|
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
|
||||||
|
def with_iter(
|
||||||
|
context_manager: ContextManager[Iterable[_T]],
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def one(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
too_short: Optional[_Raisable] = ...,
|
||||||
|
too_long: Optional[_Raisable] = ...,
|
||||||
|
) -> _T: ...
|
||||||
|
def raise_(exception: _Raisable, *args: Any) -> None: ...
|
||||||
|
def strictly_n(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
n: int,
|
||||||
|
too_short: Optional[_GenFn] = ...,
|
||||||
|
too_long: Optional[_GenFn] = ...,
|
||||||
|
) -> List[_T]: ...
|
||||||
|
def distinct_permutations(
|
||||||
|
iterable: Iterable[_T], r: Optional[int] = ...
|
||||||
|
) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def intersperse(
|
||||||
|
e: _U, iterable: Iterable[_T], n: int = ...
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def windowed(
|
||||||
|
seq: Iterable[_T], n: int, *, step: int = ...
|
||||||
|
) -> Iterator[Tuple[Optional[_T], ...]]: ...
|
||||||
|
@overload
|
||||||
|
def windowed(
|
||||||
|
seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
|
||||||
|
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
|
||||||
|
def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def substrings_indexes(
|
||||||
|
seq: Sequence[_T], reverse: bool = ...
|
||||||
|
) -> Iterator[Tuple[Sequence[_T], int, int]]: ...
|
||||||
|
|
||||||
|
class bucket(Generic[_T, _U], Container[_U]):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
key: Callable[[_T], _U],
|
||||||
|
validator: Optional[Callable[[object], object]] = ...,
|
||||||
|
) -> None: ...
|
||||||
|
def __contains__(self, value: object) -> bool: ...
|
||||||
|
def __iter__(self) -> Iterator[_U]: ...
|
||||||
|
def __getitem__(self, value: object) -> Iterator[_T]: ...
|
||||||
|
|
||||||
|
def spy(
|
||||||
|
iterable: Iterable[_T], n: int = ...
|
||||||
|
) -> Tuple[List[_T], Iterator[_T]]: ...
|
||||||
|
def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def interleave_evenly(
|
||||||
|
iterables: List[Iterable[_T]], lengths: Optional[List[int]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def collapse(
|
||||||
|
iterable: Iterable[Any],
|
||||||
|
base_type: Optional[type] = ...,
|
||||||
|
levels: Optional[int] = ...,
|
||||||
|
) -> Iterator[Any]: ...
|
||||||
|
@overload
|
||||||
|
def side_effect(
|
||||||
|
func: Callable[[_T], object],
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
chunk_size: None = ...,
|
||||||
|
before: Optional[Callable[[], object]] = ...,
|
||||||
|
after: Optional[Callable[[], object]] = ...,
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
@overload
|
||||||
|
def side_effect(
|
||||||
|
func: Callable[[List[_T]], object],
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
chunk_size: int,
|
||||||
|
before: Optional[Callable[[], object]] = ...,
|
||||||
|
after: Optional[Callable[[], object]] = ...,
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def sliced(
|
||||||
|
seq: Sequence[_T], n: int, strict: bool = ...
|
||||||
|
) -> Iterator[Sequence[_T]]: ...
|
||||||
|
def split_at(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
pred: Callable[[_T], object],
|
||||||
|
maxsplit: int = ...,
|
||||||
|
keep_separator: bool = ...,
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
def split_before(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
def split_after(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
def split_when(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
pred: Callable[[_T, _T], object],
|
||||||
|
maxsplit: int = ...,
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
def split_into(
|
||||||
|
iterable: Iterable[_T], sizes: Iterable[Optional[int]]
|
||||||
|
) -> Iterator[List[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def padded(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
*,
|
||||||
|
n: Optional[int] = ...,
|
||||||
|
next_multiple: bool = ...,
|
||||||
|
) -> Iterator[Optional[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def padded(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
fillvalue: _U,
|
||||||
|
n: Optional[int] = ...,
|
||||||
|
next_multiple: bool = ...,
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
@overload
|
||||||
|
def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
@overload
|
||||||
|
def repeat_last(
|
||||||
|
iterable: Iterable[_T], default: _U
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def stagger(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
offsets: _SizedIterable[int] = ...,
|
||||||
|
longest: bool = ...,
|
||||||
|
) -> Iterator[Tuple[Optional[_T], ...]]: ...
|
||||||
|
@overload
|
||||||
|
def stagger(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
offsets: _SizedIterable[int] = ...,
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: _U = ...,
|
||||||
|
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
|
||||||
|
|
||||||
|
class UnequalIterablesError(ValueError):
|
||||||
|
def __init__(
|
||||||
|
self, details: Optional[Tuple[int, int, int]] = ...
|
||||||
|
) -> None: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def zip_equal(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_equal(
|
||||||
|
__iter1: Iterable[_T1], __iter2: Iterable[_T2]
|
||||||
|
) -> Iterator[Tuple[_T1, _T2]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_equal(
|
||||||
|
__iter1: Iterable[_T],
|
||||||
|
__iter2: Iterable[_T],
|
||||||
|
__iter3: Iterable[_T],
|
||||||
|
*iterables: Iterable[_T],
|
||||||
|
) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T1],
|
||||||
|
*,
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: None = None,
|
||||||
|
) -> Iterator[Tuple[Optional[_T1]]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T1],
|
||||||
|
__iter2: Iterable[_T2],
|
||||||
|
*,
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: None = None,
|
||||||
|
) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T],
|
||||||
|
__iter2: Iterable[_T],
|
||||||
|
__iter3: Iterable[_T],
|
||||||
|
*iterables: Iterable[_T],
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: None = None,
|
||||||
|
) -> Iterator[Tuple[Optional[_T], ...]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T1],
|
||||||
|
*,
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: _U,
|
||||||
|
) -> Iterator[Tuple[Union[_T1, _U]]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T1],
|
||||||
|
__iter2: Iterable[_T2],
|
||||||
|
*,
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: _U,
|
||||||
|
) -> Iterator[Tuple[Union[_T1, _U], Union[_T2, _U]]]: ...
|
||||||
|
@overload
|
||||||
|
def zip_offset(
|
||||||
|
__iter1: Iterable[_T],
|
||||||
|
__iter2: Iterable[_T],
|
||||||
|
__iter3: Iterable[_T],
|
||||||
|
*iterables: Iterable[_T],
|
||||||
|
offsets: _SizedIterable[int],
|
||||||
|
longest: bool = ...,
|
||||||
|
fillvalue: _U,
|
||||||
|
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
|
||||||
|
def sort_together(
|
||||||
|
iterables: Iterable[Iterable[_T]],
|
||||||
|
key_list: Iterable[int] = ...,
|
||||||
|
key: Optional[Callable[..., Any]] = ...,
|
||||||
|
reverse: bool = ...,
|
||||||
|
) -> List[Tuple[_T, ...]]: ...
|
||||||
|
def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ...
|
||||||
|
def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
|
||||||
|
def always_iterable(
|
||||||
|
obj: object,
|
||||||
|
base_type: Union[
|
||||||
|
type, Tuple[Union[type, Tuple[Any, ...]], ...], None
|
||||||
|
] = ...,
|
||||||
|
) -> Iterator[Any]: ...
|
||||||
|
def adjacent(
|
||||||
|
predicate: Callable[[_T], bool],
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
distance: int = ...,
|
||||||
|
) -> Iterator[Tuple[bool, _T]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: None = None,
|
||||||
|
valuefunc: None = None,
|
||||||
|
reducefunc: None = None,
|
||||||
|
) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: None,
|
||||||
|
reducefunc: None,
|
||||||
|
) -> Iterator[Tuple[_U, Iterator[_T]]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: None,
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: None,
|
||||||
|
) -> Iterable[Tuple[_T, Iterable[_V]]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: None,
|
||||||
|
) -> Iterable[Tuple[_U, Iterator[_V]]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: None,
|
||||||
|
valuefunc: None,
|
||||||
|
reducefunc: Callable[[Iterator[_T]], _W],
|
||||||
|
) -> Iterable[Tuple[_T, _W]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: None,
|
||||||
|
reducefunc: Callable[[Iterator[_T]], _W],
|
||||||
|
) -> Iterable[Tuple[_U, _W]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: None,
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: Callable[[Iterable[_V]], _W],
|
||||||
|
) -> Iterable[Tuple[_T, _W]]: ...
|
||||||
|
@overload
|
||||||
|
def groupby_transform(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: Callable[[Iterable[_V]], _W],
|
||||||
|
) -> Iterable[Tuple[_U, _W]]: ...
|
||||||
|
|
||||||
|
class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
|
||||||
|
@overload
|
||||||
|
def __init__(self, __stop: _T) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __init__(self, __start: _T, __stop: _T) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
|
||||||
|
def __bool__(self) -> bool: ...
|
||||||
|
def __contains__(self, elem: object) -> bool: ...
|
||||||
|
def __eq__(self, other: object) -> bool: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, key: int) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
|
||||||
|
def __hash__(self) -> int: ...
|
||||||
|
def __iter__(self) -> Iterator[_T]: ...
|
||||||
|
def __len__(self) -> int: ...
|
||||||
|
def __reduce__(
|
||||||
|
self,
|
||||||
|
) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ...
|
||||||
|
def __repr__(self) -> str: ...
|
||||||
|
def __reversed__(self) -> Iterator[_T]: ...
|
||||||
|
def count(self, value: _T) -> int: ...
|
||||||
|
def index(self, value: _T) -> int: ... # type: ignore
|
||||||
|
|
||||||
|
def count_cycle(
|
||||||
|
iterable: Iterable[_T], n: Optional[int] = ...
|
||||||
|
) -> Iterable[Tuple[int, _T]]: ...
|
||||||
|
def mark_ends(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
) -> Iterable[Tuple[bool, bool, _T]]: ...
|
||||||
|
def locate(
|
||||||
|
iterable: Iterable[object],
|
||||||
|
pred: Callable[..., Any] = ...,
|
||||||
|
window_size: Optional[int] = ...,
|
||||||
|
) -> Iterator[int]: ...
|
||||||
|
def lstrip(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def rstrip(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def strip(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], object]
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
|
||||||
|
class islice_extended(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(
|
||||||
|
self, iterable: Iterable[_T], *args: Optional[int]
|
||||||
|
) -> None: ...
|
||||||
|
def __iter__(self) -> islice_extended[_T]: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
def __getitem__(self, index: slice) -> islice_extended[_T]: ...
|
||||||
|
|
||||||
|
def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def consecutive_groups(
|
||||||
|
iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
|
||||||
|
) -> Iterator[Iterator[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def difference(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
func: Callable[[_T, _T], _U] = ...,
|
||||||
|
*,
|
||||||
|
initial: None = ...,
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
@overload
|
||||||
|
def difference(
|
||||||
|
iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
|
||||||
|
) -> Iterator[_U]: ...
|
||||||
|
|
||||||
|
class SequenceView(Generic[_T], Sequence[_T]):
|
||||||
|
def __init__(self, target: Sequence[_T]) -> None: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, index: int) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def __getitem__(self, index: slice) -> Sequence[_T]: ...
|
||||||
|
def __len__(self) -> int: ...
|
||||||
|
|
||||||
|
class seekable(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(
|
||||||
|
self, iterable: Iterable[_T], maxlen: Optional[int] = ...
|
||||||
|
) -> None: ...
|
||||||
|
def __iter__(self) -> seekable[_T]: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
def __bool__(self) -> bool: ...
|
||||||
|
@overload
|
||||||
|
def peek(self) -> _T: ...
|
||||||
|
@overload
|
||||||
|
def peek(self, default: _U) -> Union[_T, _U]: ...
|
||||||
|
def elements(self) -> SequenceView[_T]: ...
|
||||||
|
def seek(self, index: int) -> None: ...
|
||||||
|
|
||||||
|
class run_length:
|
||||||
|
@staticmethod
|
||||||
|
def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ...
|
||||||
|
@staticmethod
|
||||||
|
def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ...
|
||||||
|
|
||||||
|
def exactly_n(
|
||||||
|
iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
|
||||||
|
) -> bool: ...
|
||||||
|
def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ...
|
||||||
|
def make_decorator(
|
||||||
|
wrapping_func: Callable[..., _U], result_index: int = ...
|
||||||
|
) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
|
||||||
|
@overload
|
||||||
|
def map_reduce(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: None = ...,
|
||||||
|
reducefunc: None = ...,
|
||||||
|
) -> Dict[_U, List[_T]]: ...
|
||||||
|
@overload
|
||||||
|
def map_reduce(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: None = ...,
|
||||||
|
) -> Dict[_U, List[_V]]: ...
|
||||||
|
@overload
|
||||||
|
def map_reduce(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: None = ...,
|
||||||
|
reducefunc: Callable[[List[_T]], _W] = ...,
|
||||||
|
) -> Dict[_U, _W]: ...
|
||||||
|
@overload
|
||||||
|
def map_reduce(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
keyfunc: Callable[[_T], _U],
|
||||||
|
valuefunc: Callable[[_T], _V],
|
||||||
|
reducefunc: Callable[[List[_V]], _W],
|
||||||
|
) -> Dict[_U, _W]: ...
|
||||||
|
def rlocate(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
pred: Callable[..., object] = ...,
|
||||||
|
window_size: Optional[int] = ...,
|
||||||
|
) -> Iterator[int]: ...
|
||||||
|
def replace(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
pred: Callable[..., object],
|
||||||
|
substitutes: Iterable[_U],
|
||||||
|
count: Optional[int] = ...,
|
||||||
|
window_size: int = ...,
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ...
|
||||||
|
def set_partitions(
|
||||||
|
iterable: Iterable[_T], k: Optional[int] = ...
|
||||||
|
) -> Iterator[List[List[_T]]]: ...
|
||||||
|
|
||||||
|
class time_limited(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(
|
||||||
|
self, limit_seconds: float, iterable: Iterable[_T]
|
||||||
|
) -> None: ...
|
||||||
|
def __iter__(self) -> islice_extended[_T]: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def only(
|
||||||
|
iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ...
|
||||||
|
) -> Optional[_T]: ...
|
||||||
|
@overload
|
||||||
|
def only(
|
||||||
|
iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ...
|
||||||
|
) -> Union[_T, _U]: ...
|
||||||
|
def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
|
||||||
|
def distinct_combinations(
|
||||||
|
iterable: Iterable[_T], r: int
|
||||||
|
) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def filter_except(
|
||||||
|
validator: Callable[[Any], object],
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
*exceptions: Type[BaseException],
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def map_except(
|
||||||
|
function: Callable[[Any], _U],
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
*exceptions: Type[BaseException],
|
||||||
|
) -> Iterator[_U]: ...
|
||||||
|
def map_if(
|
||||||
|
iterable: Iterable[Any],
|
||||||
|
pred: Callable[[Any], bool],
|
||||||
|
func: Callable[[Any], Any],
|
||||||
|
func_else: Optional[Callable[[Any], Any]] = ...,
|
||||||
|
) -> Iterator[Any]: ...
|
||||||
|
def sample(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
k: int,
|
||||||
|
weights: Optional[Iterable[float]] = ...,
|
||||||
|
) -> List[_T]: ...
|
||||||
|
def is_sorted(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
key: Optional[Callable[[_T], _U]] = ...,
|
||||||
|
reverse: bool = False,
|
||||||
|
strict: bool = False,
|
||||||
|
) -> bool: ...
|
||||||
|
|
||||||
|
class AbortThread(BaseException):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class callback_iter(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Any],
|
||||||
|
callback_kwd: str = ...,
|
||||||
|
wait_seconds: float = ...,
|
||||||
|
) -> None: ...
|
||||||
|
def __enter__(self) -> callback_iter[_T]: ...
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: Optional[Type[BaseException]],
|
||||||
|
exc_value: Optional[BaseException],
|
||||||
|
traceback: Optional[TracebackType],
|
||||||
|
) -> Optional[bool]: ...
|
||||||
|
def __iter__(self) -> callback_iter[_T]: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
def _reader(self) -> Iterator[_T]: ...
|
||||||
|
@property
|
||||||
|
def done(self) -> bool: ...
|
||||||
|
@property
|
||||||
|
def result(self) -> Any: ...
|
||||||
|
|
||||||
|
def windowed_complete(
|
||||||
|
iterable: Iterable[_T], n: int
|
||||||
|
) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def all_unique(
|
||||||
|
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
|
||||||
|
) -> bool: ...
|
||||||
|
def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ...
|
||||||
|
def nth_permutation(
|
||||||
|
iterable: Iterable[_T], r: int, index: int
|
||||||
|
) -> Tuple[_T, ...]: ...
|
||||||
|
def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ...
|
||||||
|
def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
|
||||||
|
def combination_index(
|
||||||
|
element: Iterable[_T], iterable: Iterable[_T]
|
||||||
|
) -> int: ...
|
||||||
|
def permutation_index(
|
||||||
|
element: Iterable[_T], iterable: Iterable[_T]
|
||||||
|
) -> int: ...
|
||||||
|
def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
|
||||||
|
|
||||||
|
class countable(Generic[_T], Iterator[_T]):
|
||||||
|
def __init__(self, iterable: Iterable[_T]) -> None: ...
|
||||||
|
def __iter__(self) -> countable[_T]: ...
|
||||||
|
def __next__(self) -> _T: ...
|
||||||
|
|
||||||
|
def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[List[_T]]: ...
|
||||||
|
def zip_broadcast(
|
||||||
|
*objects: Union[_T, Iterable[_T]],
|
||||||
|
scalar_types: Union[
|
||||||
|
type, Tuple[Union[type, Tuple[Any, ...]], ...], None
|
||||||
|
] = ...,
|
||||||
|
strict: bool = ...,
|
||||||
|
) -> Iterable[Tuple[_T, ...]]: ...
|
||||||
|
def unique_in_window(
|
||||||
|
iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def duplicates_everseen(
|
||||||
|
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def duplicates_justseen(
|
||||||
|
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
|
||||||
|
class _SupportsLessThan(Protocol):
|
||||||
|
def __lt__(self, __other: Any) -> bool: ...
|
||||||
|
|
||||||
|
_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
|
||||||
|
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
|
||||||
|
) -> Tuple[_T, _T]: ...
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: Iterable[_SupportsLessThanT],
|
||||||
|
*,
|
||||||
|
key: None = None,
|
||||||
|
default: _U,
|
||||||
|
) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ...
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: Iterable[_T],
|
||||||
|
*,
|
||||||
|
key: Callable[[_T], _SupportsLessThan],
|
||||||
|
default: _U,
|
||||||
|
) -> Union[_U, Tuple[_T, _T]]: ...
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: _SupportsLessThanT,
|
||||||
|
__other: _SupportsLessThanT,
|
||||||
|
*others: _SupportsLessThanT,
|
||||||
|
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
|
||||||
|
@overload
|
||||||
|
def minmax(
|
||||||
|
iterable_or_value: _T,
|
||||||
|
__other: _T,
|
||||||
|
*others: _T,
|
||||||
|
key: Callable[[_T], _SupportsLessThan],
|
||||||
|
) -> Tuple[_T, _T]: ...
|
||||||
|
def longest_common_prefix(
|
||||||
|
iterables: Iterable[Iterable[_T]],
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def iequals(*iterables: Iterable[object]) -> bool: ...
|
||||||
|
def constrained_batches(
|
||||||
|
iterable: Iterable[object],
|
||||||
|
max_size: int,
|
||||||
|
max_count: Optional[int] = ...,
|
||||||
|
get_len: Callable[[_T], object] = ...,
|
||||||
|
strict: bool = ...,
|
||||||
|
) -> Iterator[Tuple[_T]]: ...
|
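Together with the py.typed marker added next, these stubs let a PEP 561-aware type checker (mypy, for example) resolve calls against the signatures above; at runtime the calls behave as sketched here:

# Sketch: these calls match the chunked()/first() overloads declared in more.pyi.
from more_itertools import chunked, first

print(list(chunked(range(5), 2)))     # [[0, 1], [2, 3], [4]] -- typed as Iterator[List[int]]
print(first(['a', 'b'], 'fallback'))  # 'a' -- the two-argument overload returns Union[_T, _U]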
0  libs/common/more_itertools/py.typed  Normal file
841  libs/common/more_itertools/recipes.py  Normal file
@@ -0,0 +1,841 @@
"""Imported from the recipes section of the itertools documentation.
|
||||||
|
|
||||||
|
All functions taken from the recipes section of the itertools library docs
|
||||||
|
[1]_.
|
||||||
|
Some backward-compatible usability improvements have been made.
|
||||||
|
|
||||||
|
.. [1] http://docs.python.org/library/itertools.html#recipes
|
||||||
|
|
||||||
|
"""
|
||||||
|
import math
|
||||||
|
import operator
|
||||||
|
|
||||||
|
from collections import deque
|
||||||
|
from collections.abc import Sized
|
||||||
|
from functools import reduce
|
||||||
|
from itertools import (
|
||||||
|
chain,
|
||||||
|
combinations,
|
||||||
|
compress,
|
||||||
|
count,
|
||||||
|
cycle,
|
||||||
|
groupby,
|
||||||
|
islice,
|
||||||
|
repeat,
|
||||||
|
starmap,
|
||||||
|
tee,
|
||||||
|
zip_longest,
|
||||||
|
)
|
||||||
|
from random import randrange, sample, choice
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'all_equal',
|
||||||
|
'batched',
|
||||||
|
'before_and_after',
|
||||||
|
'consume',
|
||||||
|
'convolve',
|
||||||
|
'dotproduct',
|
||||||
|
'first_true',
|
||||||
|
'flatten',
|
||||||
|
'grouper',
|
||||||
|
'iter_except',
|
||||||
|
'ncycles',
|
||||||
|
'nth',
|
||||||
|
'nth_combination',
|
||||||
|
'padnone',
|
||||||
|
'pad_none',
|
||||||
|
'pairwise',
|
||||||
|
'partition',
|
||||||
|
'polynomial_from_roots',
|
||||||
|
'powerset',
|
||||||
|
'prepend',
|
||||||
|
'quantify',
|
||||||
|
'random_combination_with_replacement',
|
||||||
|
'random_combination',
|
||||||
|
'random_permutation',
|
||||||
|
'random_product',
|
||||||
|
'repeatfunc',
|
||||||
|
'roundrobin',
|
||||||
|
'sieve',
|
||||||
|
'sliding_window',
|
||||||
|
'subslices',
|
||||||
|
'tabulate',
|
||||||
|
'tail',
|
||||||
|
'take',
|
||||||
|
'triplewise',
|
||||||
|
'unique_everseen',
|
||||||
|
'unique_justseen',
|
||||||
|
]
|
||||||
|
|
||||||
|
_marker = object()
|
||||||
|
|
||||||
|
|
||||||
|
def take(n, iterable):
|
||||||
|
"""Return first *n* items of the iterable as a list.
|
||||||
|
|
||||||
|
>>> take(3, range(10))
|
||||||
|
[0, 1, 2]
|
||||||
|
|
||||||
|
If there are fewer than *n* items in the iterable, all of them are
|
||||||
|
returned.
|
||||||
|
|
||||||
|
>>> take(10, range(3))
|
||||||
|
[0, 1, 2]
|
||||||
|
|
||||||
|
"""
|
||||||
|
return list(islice(iterable, n))
|
||||||
|
|
||||||
|
|
||||||
|
def tabulate(function, start=0):
|
||||||
|
"""Return an iterator over the results of ``func(start)``,
|
||||||
|
``func(start + 1)``, ``func(start + 2)``...
|
||||||
|
|
||||||
|
*func* should be a function that accepts one integer argument.
|
||||||
|
|
||||||
|
If *start* is not specified it defaults to 0. It will be incremented each
|
||||||
|
time the iterator is advanced.
|
||||||
|
|
||||||
|
>>> square = lambda x: x ** 2
|
||||||
|
>>> iterator = tabulate(square, -3)
|
||||||
|
>>> take(4, iterator)
|
||||||
|
[9, 4, 1, 0]
|
||||||
|
|
||||||
|
"""
|
||||||
|
return map(function, count(start))
|
||||||
|
|
||||||
|
|
||||||
|
def tail(n, iterable):
|
||||||
|
"""Return an iterator over the last *n* items of *iterable*.
|
||||||
|
|
||||||
|
>>> t = tail(3, 'ABCDEFG')
|
||||||
|
>>> list(t)
|
||||||
|
['E', 'F', 'G']
|
||||||
|
|
||||||
|
"""
|
||||||
|
# If the given iterable has a length, then we can use islice to get its
|
||||||
|
# final elements. Note that if the iterable is not actually Iterable,
|
||||||
|
# either islice or deque will throw a TypeError. This is why we don't
|
||||||
|
# check if it is Iterable.
|
||||||
|
if isinstance(iterable, Sized):
|
||||||
|
yield from islice(iterable, max(0, len(iterable) - n), None)
|
||||||
|
else:
|
||||||
|
yield from iter(deque(iterable, maxlen=n))
|
||||||
|
|
||||||
|
|
||||||
|
def consume(iterator, n=None):
|
||||||
|
"""Advance *iterable* by *n* steps. If *n* is ``None``, consume it
|
||||||
|
entirely.
|
||||||
|
|
||||||
|
Efficiently exhausts an iterator without returning values. Defaults to
|
||||||
|
consuming the whole iterator, but an optional second argument may be
|
||||||
|
provided to limit consumption.
|
||||||
|
|
||||||
|
>>> i = (x for x in range(10))
|
||||||
|
>>> next(i)
|
||||||
|
0
|
||||||
|
>>> consume(i, 3)
|
||||||
|
>>> next(i)
|
||||||
|
4
|
||||||
|
>>> consume(i)
|
||||||
|
>>> next(i)
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "<stdin>", line 1, in <module>
|
||||||
|
StopIteration
|
||||||
|
|
||||||
|
If the iterator has fewer items remaining than the provided limit, the
|
||||||
|
whole iterator will be consumed.
|
||||||
|
|
||||||
|
>>> i = (x for x in range(3))
|
||||||
|
>>> consume(i, 5)
|
||||||
|
>>> next(i)
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "<stdin>", line 1, in <module>
|
||||||
|
StopIteration
|
||||||
|
|
||||||
|
"""
|
||||||
|
# Use functions that consume iterators at C speed.
|
||||||
|
if n is None:
|
||||||
|
# feed the entire iterator into a zero-length deque
|
||||||
|
deque(iterator, maxlen=0)
|
||||||
|
else:
|
||||||
|
# advance to the empty slice starting at position n
|
||||||
|
next(islice(iterator, n, n), None)
|
||||||
|
|
||||||
|
|
||||||
|
def nth(iterable, n, default=None):
|
||||||
|
"""Returns the nth item or a default value.
|
||||||
|
|
||||||
|
>>> l = range(10)
|
||||||
|
>>> nth(l, 3)
|
||||||
|
3
|
||||||
|
>>> nth(l, 20, "zebra")
|
||||||
|
'zebra'
|
||||||
|
|
||||||
|
"""
|
||||||
|
return next(islice(iterable, n, None), default)
|
||||||
|
|
||||||
|
|
||||||
|
def all_equal(iterable):
|
||||||
|
"""
|
||||||
|
Returns ``True`` if all the elements are equal to each other.
|
||||||
|
|
||||||
|
>>> all_equal('aaaa')
|
||||||
|
True
|
||||||
|
>>> all_equal('aaab')
|
||||||
|
False
|
||||||
|
|
||||||
|
"""
|
||||||
|
g = groupby(iterable)
|
||||||
|
return next(g, True) and not next(g, False)
|
||||||
|
|
||||||
|
|
||||||
|
def quantify(iterable, pred=bool):
|
||||||
|
"""Return the how many times the predicate is true.
|
||||||
|
|
||||||
|
>>> quantify([True, False, True])
|
||||||
|
2
|
||||||
|
|
||||||
|
"""
|
||||||
|
return sum(map(pred, iterable))
|
||||||
|
|
||||||
|
|
||||||
|
def pad_none(iterable):
|
||||||
|
"""Returns the sequence of elements and then returns ``None`` indefinitely.
|
||||||
|
|
||||||
|
>>> take(5, pad_none(range(3)))
|
||||||
|
[0, 1, 2, None, None]
|
||||||
|
|
||||||
|
Useful for emulating the behavior of the built-in :func:`map` function.
|
||||||
|
|
||||||
|
See also :func:`padded`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return chain(iterable, repeat(None))
|
||||||
|
|
||||||
|
|
||||||
|
padnone = pad_none
|
||||||
|
|
||||||
|
|
||||||
|
def ncycles(iterable, n):
|
||||||
|
"""Returns the sequence elements *n* times
|
||||||
|
|
||||||
|
>>> list(ncycles(["a", "b"], 3))
|
||||||
|
['a', 'b', 'a', 'b', 'a', 'b']
|
||||||
|
|
||||||
|
"""
|
||||||
|
return chain.from_iterable(repeat(tuple(iterable), n))
|
||||||
|
|
||||||
|
|
||||||
|
def dotproduct(vec1, vec2):
|
||||||
|
"""Returns the dot product of the two iterables.
|
||||||
|
|
||||||
|
>>> dotproduct([10, 10], [20, 20])
|
||||||
|
400
|
||||||
|
|
||||||
|
"""
|
||||||
|
return sum(map(operator.mul, vec1, vec2))
|
||||||
|
|
||||||
|
|
||||||
|
def flatten(listOfLists):
|
||||||
|
"""Return an iterator flattening one level of nesting in a list of lists.
|
||||||
|
|
||||||
|
>>> list(flatten([[0, 1], [2, 3]]))
|
||||||
|
[0, 1, 2, 3]
|
||||||
|
|
||||||
|
See also :func:`collapse`, which can flatten multiple levels of nesting.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return chain.from_iterable(listOfLists)
|
||||||
|
|
||||||
|
|
||||||
|
def repeatfunc(func, times=None, *args):
|
||||||
|
"""Call *func* with *args* repeatedly, returning an iterable over the
|
||||||
|
results.
|
||||||
|
|
||||||
|
If *times* is specified, the iterable will terminate after that many
|
||||||
|
repetitions:
|
||||||
|
|
||||||
|
>>> from operator import add
|
||||||
|
>>> times = 4
|
||||||
|
>>> args = 3, 5
|
||||||
|
>>> list(repeatfunc(add, times, *args))
|
||||||
|
[8, 8, 8, 8]
|
||||||
|
|
||||||
|
If *times* is ``None`` the iterable will not terminate:
|
||||||
|
|
||||||
|
>>> from random import randrange
|
||||||
|
>>> times = None
|
||||||
|
>>> args = 1, 11
|
||||||
|
>>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
|
||||||
|
[2, 4, 8, 1, 8, 4]
|
||||||
|
|
||||||
|
"""
|
||||||
|
if times is None:
|
||||||
|
return starmap(func, repeat(args))
|
||||||
|
return starmap(func, repeat(args, times))
|
||||||
|
|
||||||
|
|
||||||
|
def _pairwise(iterable):
|
||||||
|
"""Returns an iterator of paired items, overlapping, from the original
|
||||||
|
|
||||||
|
>>> take(4, pairwise(count()))
|
||||||
|
[(0, 1), (1, 2), (2, 3), (3, 4)]
|
||||||
|
|
||||||
|
On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
a, b = tee(iterable)
|
||||||
|
next(b, None)
|
||||||
|
yield from zip(a, b)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from itertools import pairwise as itertools_pairwise
|
||||||
|
except ImportError:
|
||||||
|
pairwise = _pairwise
|
||||||
|
else:
|
||||||
|
|
||||||
|
def pairwise(iterable):
|
||||||
|
yield from itertools_pairwise(iterable)
|
||||||
|
|
||||||
|
pairwise.__doc__ = _pairwise.__doc__
|
||||||
|
|
||||||
|
|
||||||
|
class UnequalIterablesError(ValueError):
|
||||||
|
def __init__(self, details=None):
|
||||||
|
msg = 'Iterables have different lengths'
|
||||||
|
if details is not None:
|
||||||
|
msg += (': index 0 has length {}; index {} has length {}').format(
|
||||||
|
*details
|
||||||
|
)
|
||||||
|
|
||||||
|
super().__init__(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def _zip_equal_generator(iterables):
|
||||||
|
for combo in zip_longest(*iterables, fillvalue=_marker):
|
||||||
|
for val in combo:
|
||||||
|
if val is _marker:
|
||||||
|
raise UnequalIterablesError()
|
||||||
|
yield combo
|
||||||
|
|
||||||
|
|
||||||
|
def _zip_equal(*iterables):
|
||||||
|
# Check whether the iterables are all the same size.
|
||||||
|
try:
|
||||||
|
first_size = len(iterables[0])
|
||||||
|
for i, it in enumerate(iterables[1:], 1):
|
||||||
|
size = len(it)
|
||||||
|
if size != first_size:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# If we didn't break out, we can use the built-in zip.
|
||||||
|
return zip(*iterables)
|
||||||
|
|
||||||
|
# If we did break out, there was a mismatch.
|
||||||
|
raise UnequalIterablesError(details=(first_size, i, size))
|
||||||
|
# If any one of the iterables didn't have a length, start reading
|
||||||
|
# them until one runs out.
|
||||||
|
except TypeError:
|
||||||
|
return _zip_equal_generator(iterables)
|
||||||
|
|
||||||
|
|
||||||
|
def grouper(iterable, n, incomplete='fill', fillvalue=None):
|
||||||
|
"""Group elements from *iterable* into fixed-length groups of length *n*.
|
||||||
|
|
||||||
|
>>> list(grouper('ABCDEF', 3))
|
||||||
|
[('A', 'B', 'C'), ('D', 'E', 'F')]
|
||||||
|
|
||||||
|
The keyword arguments *incomplete* and *fillvalue* control what happens for
|
||||||
|
iterables whose length is not a multiple of *n*.
|
||||||
|
|
||||||
|
When *incomplete* is `'fill'`, the last group will contain instances of
|
||||||
|
*fillvalue*.
|
||||||
|
|
||||||
|
>>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x'))
|
||||||
|
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
|
||||||
|
|
||||||
|
When *incomplete* is `'ignore'`, the last group will not be emitted.
|
||||||
|
|
||||||
|
>>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x'))
|
||||||
|
[('A', 'B', 'C'), ('D', 'E', 'F')]
|
||||||
|
|
||||||
|
When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised.
|
||||||
|
|
||||||
|
>>> it = grouper('ABCDEFG', 3, incomplete='strict')
|
||||||
|
>>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL
|
||||||
|
Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
UnequalIterablesError
|
||||||
|
|
||||||
|
"""
|
||||||
|
args = [iter(iterable)] * n
|
||||||
|
if incomplete == 'fill':
|
||||||
|
return zip_longest(*args, fillvalue=fillvalue)
|
||||||
|
if incomplete == 'strict':
|
||||||
|
return _zip_equal(*args)
|
||||||
|
if incomplete == 'ignore':
|
||||||
|
return zip(*args)
|
||||||
|
else:
|
||||||
|
raise ValueError('Expected fill, strict, or ignore')
|
||||||
|
|
||||||
|
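For reference, the three *incomplete* modes wired up in grouper() above behave as follows (a small usage sketch, matching the doctests):

# Sketch: grouper()'s three ways of handling a tail shorter than n.
from more_itertools.recipes import grouper

print(list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')))
# [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
print(list(grouper('ABCDEFG', 3, incomplete='ignore')))
# [('A', 'B', 'C'), ('D', 'E', 'F')]
try:
    list(grouper('ABCDEFG', 3, incomplete='strict'))
except ValueError as exc:        # UnequalIterablesError is a ValueError subclass
    print(type(exc).__name__)    # UnequalIterablesError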
def roundrobin(*iterables):
    """Yields an item from each iterable, alternating between them.

    >>> list(roundrobin('ABC', 'D', 'EF'))
    ['A', 'D', 'E', 'B', 'F', 'C']

    This function produces the same output as :func:`interleave_longest`, but
    may perform better for some inputs (in particular when the number of
    iterables is small).

    """
    # Recipe credited to George Sakkis
    pending = len(iterables)
    nexts = cycle(iter(it).__next__ for it in iterables)
    while pending:
        try:
            for next in nexts:
                yield next()
        except StopIteration:
            pending -= 1
            nexts = cycle(islice(nexts, pending))


def partition(pred, iterable):
    """
    Returns a 2-tuple of iterables derived from the input iterable.
    The first yields the items that have ``pred(item) == False``.
    The second yields the items that have ``pred(item) == True``.

    >>> is_odd = lambda x: x % 2 != 0
    >>> iterable = range(10)
    >>> even_items, odd_items = partition(is_odd, iterable)
    >>> list(even_items), list(odd_items)
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])

    If *pred* is None, :func:`bool` is used.

    >>> iterable = [0, 1, False, True, '', ' ']
    >>> false_items, true_items = partition(None, iterable)
    >>> list(false_items), list(true_items)
    ([0, False, ''], [1, True, ' '])

    """
    if pred is None:
        pred = bool

    evaluations = ((pred(x), x) for x in iterable)
    t1, t2 = tee(evaluations)
    return (
        (x for (cond, x) in t1 if not cond),
        (x for (cond, x) in t2 if cond),
    )


def powerset(iterable):
    """Yields all possible subsets of the iterable.

    >>> list(powerset([1, 2, 3]))
    [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]

    :func:`powerset` will operate on iterables that aren't :class:`set`
    instances, so repeated elements in the input will produce repeated elements
    in the output. Use :func:`unique_everseen` on the input to avoid generating
    duplicates:

    >>> seq = [1, 1, 0]
    >>> list(powerset(seq))
    [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
    >>> from more_itertools import unique_everseen
    >>> list(powerset(unique_everseen(seq)))
    [(), (1,), (0,), (1, 0)]

    """
    s = list(iterable)
    return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))


def unique_everseen(iterable, key=None):
    """
    Yield unique elements, preserving order.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']

    Sequences with a mix of hashable and unhashable items can be used.
    The function will be slower (i.e., `O(n^2)`) for unhashable items.

    Remember that ``list`` objects are unhashable - you can use the *key*
    parameter to transform the list to a tuple (which is hashable) to
    avoid a slowdown.

    >>> iterable = ([1, 2], [2, 3], [1, 2])
    >>> list(unique_everseen(iterable))  # Slow
    [[1, 2], [2, 3]]
    >>> list(unique_everseen(iterable, key=tuple))  # Faster
    [[1, 2], [2, 3]]

    Similary, you may want to convert unhashable ``set`` objects with
    ``key=frozenset``. For ``dict`` objects,
    ``key=lambda x: frozenset(x.items())`` can be used.

    """
    seenset = set()
    seenset_add = seenset.add
    seenlist = []
    seenlist_add = seenlist.append
    use_key = key is not None

    for element in iterable:
        k = key(element) if use_key else element
        try:
            if k not in seenset:
                seenset_add(k)
                yield element
        except TypeError:
            if k not in seenlist:
                seenlist_add(k)
                yield element


def unique_justseen(iterable, key=None):
    """Yields elements in order, ignoring serial duplicates

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'A', 'D']

    """
    return map(next, map(operator.itemgetter(1), groupby(iterable, key)))


def iter_except(func, exception, first=None):
    """Yields results from a function repeatedly until an exception is raised.

    Converts a call-until-exception interface to an iterator interface.
    Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
    to end the loop.

    >>> l = [0, 1, 2]
    >>> list(iter_except(l.pop, IndexError))
    [2, 1, 0]

    Multiple exceptions can be specified as a stopping condition:

    >>> l = [1, 2, 3, '...', 4, 5, 6]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    [7, 6, 5]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    [4, 3, 2]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    []

    """
    try:
        if first is not None:
            yield first()
        while 1:
            yield func()
    except exception:
        pass


def first_true(iterable, default=None, pred=None):
    """
    Returns the first true value in the iterable.

    If no true value is found, returns *default*

    If *pred* is not None, returns the first item for which
    ``pred(item) == True`` .

    >>> first_true(range(10))
    1
    >>> first_true(range(10), pred=lambda x: x > 5)
    6
    >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
    'missing'

    """
    return next(filter(pred, iterable), default)


def random_product(*args, repeat=1):
    """Draw an item at random from each of the input iterables.

    >>> random_product('abc', range(4), 'XYZ')  # doctest:+SKIP
    ('c', 3, 'Z')

    If *repeat* is provided as a keyword argument, that many items will be
    drawn from each iterable.

    >>> random_product('abcd', range(4), repeat=2)  # doctest:+SKIP
    ('a', 2, 'd', 3)

    This equivalent to taking a random selection from
    ``itertools.product(*args, **kwarg)``.

    """
    pools = [tuple(pool) for pool in args] * repeat
    return tuple(choice(pool) for pool in pools)


def random_permutation(iterable, r=None):
    """Return a random *r* length permutation of the elements in *iterable*.

    If *r* is not specified or is ``None``, then *r* defaults to the length of
    *iterable*.

    >>> random_permutation(range(5))  # doctest:+SKIP
    (3, 4, 0, 1, 2)

    This equivalent to taking a random selection from
    ``itertools.permutations(iterable, r)``.

    """
    pool = tuple(iterable)
    r = len(pool) if r is None else r
    return tuple(sample(pool, r))


def random_combination(iterable, r):
    """Return a random *r* length subsequence of the elements in *iterable*.

    >>> random_combination(range(5), 3)  # doctest:+SKIP
    (2, 3, 4)

    This equivalent to taking a random selection from
    ``itertools.combinations(iterable, r)``.

    """
    pool = tuple(iterable)
    n = len(pool)
    indices = sorted(sample(range(n), r))
    return tuple(pool[i] for i in indices)


def random_combination_with_replacement(iterable, r):
    """Return a random *r* length subsequence of elements in *iterable*,
    allowing individual elements to be repeated.

    >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
    (0, 0, 1, 2, 2)

    This equivalent to taking a random selection from
    ``itertools.combinations_with_replacement(iterable, r)``.

    """
    pool = tuple(iterable)
    n = len(pool)
    indices = sorted(randrange(n) for i in range(r))
    return tuple(pool[i] for i in indices)


def nth_combination(iterable, r, index):
    """Equivalent to ``list(combinations(iterable, r))[index]``.

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`nth_combination` computes the subsequence at
    sort position *index* directly, without computing the previous
    subsequences.

    >>> nth_combination(range(5), 3, 5)
    (0, 3, 4)

    ``ValueError`` will be raised If *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = tuple(iterable)
    n = len(pool)
    if (r < 0) or (r > n):
        raise ValueError

    c = 1
    k = min(r, n - r)
    for i in range(1, k + 1):
        c = c * (n - k + i) // i

    if index < 0:
        index += c

    if (index < 0) or (index >= c):
        raise IndexError

    result = []
    while r:
        c, n, r = c * r // n, n - 1, r - 1
        while index >= c:
            index -= c
            c, n = c * (n - r) // n, n - 1
        result.append(pool[-1 - n])

    return tuple(result)


def prepend(value, iterator):
    """Yield *value*, followed by the elements in *iterator*.

    >>> value = '0'
    >>> iterator = ['1', '2', '3']
    >>> list(prepend(value, iterator))
    ['0', '1', '2', '3']

    To prepend multiple values, see :func:`itertools.chain`
    or :func:`value_chain`.

    """
    return chain([value], iterator)


def convolve(signal, kernel):
    """Convolve the iterable *signal* with the iterable *kernel*.

    >>> signal = (1, 2, 3, 4, 5)
    >>> kernel = [3, 2, 1]
    >>> list(convolve(signal, kernel))
    [3, 8, 14, 20, 26, 14, 5]

    Note: the input arguments are not interchangeable, as the *kernel*
    is immediately consumed and stored.

    """
    kernel = tuple(kernel)[::-1]
    n = len(kernel)
    window = deque([0], maxlen=n) * n
    for x in chain(signal, repeat(0, n - 1)):
        window.append(x)
        yield sum(map(operator.mul, kernel, window))
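One common use of the convolve() recipe above is a streaming moving average; a small sketch:

# Sketch: a 3-point moving average built on convolve().
from more_itertools.recipes import convolve

kernel = [1 / 3] * 3
print(list(convolve([3.0, 6.0, 9.0, 12.0], kernel)))
# approximately [1.0, 3.0, 6.0, 9.0, 7.0, 4.0] -- edges ramp; the middle values are true 3-point means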
def before_and_after(predicate, it):
    """A variant of :func:`takewhile` that allows complete access to the
    remainder of the iterator.

    >>> it = iter('ABCdEfGhI')
    >>> all_upper, remainder = before_and_after(str.isupper, it)
    >>> ''.join(all_upper)
    'ABC'
    >>> ''.join(remainder) # takewhile() would lose the 'd'
    'dEfGhI'

    Note that the first iterator must be fully consumed before the second
    iterator can generate valid results.
    """
    it = iter(it)
    transition = []

    def true_iterator():
        for elem in it:
            if predicate(elem):
                yield elem
            else:
                transition.append(elem)
                return

    # Note: this is different from itertools recipes to allow nesting
    # before_and_after remainders into before_and_after again. See tests
    # for an example.
    remainder_iterator = chain(transition, it)

    return true_iterator(), remainder_iterator


def triplewise(iterable):
    """Return overlapping triplets from *iterable*.

    >>> list(triplewise('ABCDE'))
    [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]

    """
    for (a, _), (b, c) in pairwise(pairwise(iterable)):
        yield a, b, c


def sliding_window(iterable, n):
    """Return a sliding window of width *n* over *iterable*.

    >>> list(sliding_window(range(6), 4))
    [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]

    If *iterable* has fewer than *n* items, then nothing is yielded:

    >>> list(sliding_window(range(3), 4))
    []

    For a variant with more features, see :func:`windowed`.
    """
    it = iter(iterable)
    window = deque(islice(it, n), maxlen=n)
    if len(window) == n:
        yield tuple(window)
    for x in it:
        window.append(x)
        yield tuple(window)


def subslices(iterable):
    """Return all contiguous non-empty subslices of *iterable*.

    >>> list(subslices('ABC'))
    [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']]

    This is similar to :func:`substrings`, but emits items in a different
    order.
    """
    seq = list(iterable)
    slices = starmap(slice, combinations(range(len(seq) + 1), 2))
    return map(operator.getitem, repeat(seq), slices)


def polynomial_from_roots(roots):
    """Compute a polynomial's coefficients from its roots.

    >>> roots = [5, -4, 3] # (x - 5) * (x + 4) * (x - 3)
    >>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60
    [1, -4, -17, 60]
    """
    # Use math.prod for Python 3.8+,
    prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1))
    roots = list(map(operator.neg, roots))
    return [
        sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1)
    ]


def sieve(n):
    """Yield the primes less than n.

    >>> list(sieve(30))
    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    """
    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
    limit = isqrt(n) + 1
    data = bytearray([1]) * n
    data[:2] = 0, 0
    for p in compress(range(limit), data):
        data[p + p : n : p] = bytearray(len(range(p + p, n, p)))

    return compress(count(), data)


def batched(iterable, n):
    """Batch data into lists of length *n*. The last batch may be shorter.

    >>> list(batched('ABCDEFG', 3))
    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]

    This recipe is from the ``itertools`` docs. This library also provides
    :func:`chunked`, which has a different implementation.
    """
    it = iter(iterable)
    while True:
        batch = list(islice(it, n))
        if not batch:
            break
        yield batch
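The recipes compose naturally; a short sketch combining a few of the functions defined above:

# Sketch: composing sieve(), sliding_window() and unique_everseen() from this module.
from more_itertools.recipes import sieve, sliding_window, unique_everseen

print(list(sliding_window(sieve(12), 2)))        # [(2, 3), (3, 5), (5, 7), (7, 11)]
print(list(unique_everseen('AAAABBBCCDAABBB')))  # ['A', 'B', 'C', 'D']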
110  libs/common/more_itertools/recipes.pyi  Normal file
@@ -0,0 +1,110 @@
"""Stubs for more_itertools.recipes"""
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
Sequence,
|
||||||
|
Tuple,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
from typing_extensions import overload, Type
|
||||||
|
|
||||||
|
# Type and type variable definitions
|
||||||
|
_T = TypeVar('_T')
|
||||||
|
_U = TypeVar('_U')
|
||||||
|
|
||||||
|
def take(n: int, iterable: Iterable[_T]) -> List[_T]: ...
|
||||||
|
def tabulate(
|
||||||
|
function: Callable[[int], _T], start: int = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ...
|
||||||
|
@overload
|
||||||
|
def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ...
|
||||||
|
@overload
|
||||||
|
def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ...
|
||||||
|
def all_equal(iterable: Iterable[object]) -> bool: ...
|
||||||
|
def quantify(
|
||||||
|
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
|
||||||
|
) -> int: ...
|
||||||
|
def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
|
||||||
|
def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
|
||||||
|
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
|
||||||
|
def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
|
||||||
|
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
|
||||||
|
def repeatfunc(
|
||||||
|
func: Callable[..., _U], times: Optional[int] = ..., *args: Any
|
||||||
|
) -> Iterator[_U]: ...
|
||||||
|
def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ...
|
||||||
|
def grouper(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
n: int,
|
||||||
|
incomplete: str = ...,
|
||||||
|
fillvalue: _U = ...,
|
||||||
|
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
|
||||||
|
def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def partition(
|
||||||
|
pred: Optional[Callable[[_T], object]], iterable: Iterable[_T]
|
||||||
|
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
|
||||||
|
def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def unique_everseen(
|
||||||
|
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
def unique_justseen(
|
||||||
|
iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ...
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
@overload
|
||||||
|
def iter_except(
|
||||||
|
func: Callable[[], _T],
|
||||||
|
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
|
||||||
|
first: None = ...,
|
||||||
|
) -> Iterator[_T]: ...
|
||||||
|
@overload
|
||||||
|
def iter_except(
|
||||||
|
func: Callable[[], _T],
|
||||||
|
exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
|
||||||
|
first: Callable[[], _U],
|
||||||
|
) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
@overload
|
||||||
|
def first_true(
|
||||||
|
iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ...
|
||||||
|
) -> Optional[_T]: ...
|
||||||
|
@overload
|
||||||
|
def first_true(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
default: _U,
|
||||||
|
pred: Optional[Callable[[_T], object]] = ...,
|
||||||
|
) -> Union[_T, _U]: ...
|
||||||
|
def random_product(
|
||||||
|
*args: Iterable[_T], repeat: int = ...
|
||||||
|
) -> Tuple[_T, ...]: ...
|
||||||
|
def random_permutation(
|
||||||
|
iterable: Iterable[_T], r: Optional[int] = ...
|
||||||
|
) -> Tuple[_T, ...]: ...
|
||||||
|
def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ...
|
||||||
|
def random_combination_with_replacement(
|
||||||
|
iterable: Iterable[_T], r: int
|
||||||
|
) -> Tuple[_T, ...]: ...
|
||||||
|
def nth_combination(
|
||||||
|
iterable: Iterable[_T], r: int, index: int
|
||||||
|
) -> Tuple[_T, ...]: ...
|
||||||
|
def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ...
|
||||||
|
def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
|
||||||
|
def before_and_after(
|
||||||
|
predicate: Callable[[_T], bool], it: Iterable[_T]
|
||||||
|
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
|
||||||
|
def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ...
|
||||||
|
def sliding_window(
|
||||||
|
iterable: Iterable[_T], n: int
|
||||||
|
) -> Iterator[Tuple[_T, ...]]: ...
|
||||||
|
def subslices(iterable: Iterable[_T]) -> Iterator[List[_T]]: ...
|
||||||
|
def polynomial_from_roots(roots: Sequence[int]) -> List[int]: ...
|
||||||
|
def sieve(n: int) -> Iterator[int]: ...
|
||||||
|
def batched(
|
||||||
|
iterable: Iterable[_T],
|
||||||
|
n: int,
|
||||||
|
) -> Iterator[List[_T]]: ...
|
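Because nth() is declared with two overloads above, a type checker narrows its return type from the call shape alone: without a default the result may be None, with a default it is the union of the element and default types. A small illustration (the comments describe the expected static inference; both calls also run fine at runtime):

    from more_itertools import nth

    xs = [1, 2, 3]
    a = nth(xs, 10)        # checker infers Optional[int]; runtime value is None
    b = nth(xs, 10, -1)    # checker infers int; runtime value is -1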
Binary file not shown.
220
libs/common/zipp.py
Normal file
@ -0,0 +1,220 @@
# coding: utf-8

from __future__ import division

import io
import sys
import posixpath
import zipfile
import functools
import itertools

import more_itertools

__metaclass__ = type


def _parents(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all parents of that path.

    >>> list(_parents('b/d'))
    ['b']
    >>> list(_parents('/b/d/'))
    ['/b']
    >>> list(_parents('b/d/f/'))
    ['b/d', 'b']
    >>> list(_parents('b'))
    []
    >>> list(_parents(''))
    []
    """
    return itertools.islice(_ancestry(path), 1, None)


def _ancestry(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all elements of that path

    >>> list(_ancestry('b/d'))
    ['b/d', 'b']
    >>> list(_ancestry('/b/d/'))
    ['/b/d', '/b']
    >>> list(_ancestry('b/d/f/'))
    ['b/d/f', 'b/d', 'b']
    >>> list(_ancestry('b'))
    ['b']
    >>> list(_ancestry(''))
    []
    """
    path = path.rstrip(posixpath.sep)
    while path and path != posixpath.sep:
        yield path
        path, tail = posixpath.split(path)


class Path:
    """
    A pathlib-compatible interface for zip files.

    Consider a zip file with this structure::

        .
        ├── a.txt
        └── b
            ├── c.txt
            └── d
                └── e.txt

    >>> data = io.BytesIO()
    >>> zf = zipfile.ZipFile(data, 'w')
    >>> zf.writestr('a.txt', 'content of a')
    >>> zf.writestr('b/c.txt', 'content of c')
    >>> zf.writestr('b/d/e.txt', 'content of e')
    >>> zf.filename = 'abcde.zip'

    Path accepts the zipfile object itself or a filename

    >>> root = Path(zf)

    From there, several path operations are available.

    Directory iteration (including the zip file itself):

    >>> a, b = root.iterdir()
    >>> a
    Path('abcde.zip', 'a.txt')
    >>> b
    Path('abcde.zip', 'b/')

    name property:

    >>> b.name
    'b'

    join with divide operator:

    >>> c = b / 'c.txt'
    >>> c
    Path('abcde.zip', 'b/c.txt')
    >>> c.name
    'c.txt'

    Read text:

    >>> c.read_text()
    'content of c'

    existence:

    >>> c.exists()
    True
    >>> (b / 'missing.txt').exists()
    False

    Coercion to string:

    >>> str(c)
    'abcde.zip/b/c.txt'
    """

    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"

    def __init__(self, root, at=""):
        self.root = (
            root
            if isinstance(root, zipfile.ZipFile)
            else zipfile.ZipFile(self._pathlib_compat(root))
        )
        self.at = at

    @staticmethod
    def _pathlib_compat(path):
        """
        For path-like objects, convert to a filename for compatibility
        on Python 3.6.1 and earlier.
        """
        try:
            return path.__fspath__()
        except AttributeError:
            return str(path)

    @property
    def open(self):
        return functools.partial(self.root.open, self.at)

    @property
    def name(self):
        return posixpath.basename(self.at.rstrip("/"))

    def read_text(self, *args, **kwargs):
        with self.open() as strm:
            return io.TextIOWrapper(strm, *args, **kwargs).read()

    def read_bytes(self):
        with self.open() as strm:
            return strm.read()

    def _is_child(self, path):
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")

    def _next(self, at):
        return Path(self.root, at)

    def is_dir(self):
        return not self.at or self.at.endswith("/")

    def is_file(self):
        return not self.is_dir()

    def exists(self):
        return self.at in self._names()

    def iterdir(self):
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        subs = map(self._next, self._names())
        return filter(self._is_child, subs)

    def __str__(self):
        return posixpath.join(self.root.filename, self.at)

    def __repr__(self):
        return self.__repr.format(self=self)

    def joinpath(self, add):
        add = self._pathlib_compat(add)
        next = posixpath.join(self.at, add)
        next_dir = posixpath.join(self.at, add, "")
        names = self._names()
        return self._next(next_dir if next not in names and next_dir in names else next)

    __truediv__ = joinpath

    @staticmethod
    def _implied_dirs(names):
        return more_itertools.unique_everseen(
            parent + "/"
            for name in names
            for parent in _parents(name)
            if parent + "/" not in names
        )

    @classmethod
    def _add_implied_dirs(cls, names):
        return names + list(cls._implied_dirs(names))

    @property
    def parent(self):
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            parent_at += '/'
        return self._next(parent_at)

    def _names(self):
        return self._add_implied_dirs(self.root.namelist())

    if sys.version_info < (3,):
        __div__ = __truediv__
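Most of what Path adds on top of ZipFile here is the handling of "implied" directories: archives frequently store only file members (for example b/d/e.txt with no b/ or b/d/ records), so _names() appends the missing parents before iterdir(), exists(), and joinpath() consult the list. A minimal sketch of that behaviour, assuming this vendored module (libs/common/zipp.py) is importable as zipp:

    import io
    import zipfile

    import zipp

    data = io.BytesIO()
    with zipfile.ZipFile(data, 'w') as zf:
        zf.writestr('b/d/e.txt', 'content of e')   # only a file entry is written

    zf = zipfile.ZipFile(data)
    zf.filename = 'demo.zip'
    root = zipp.Path(zf)
    print([str(p) for p in root.iterdir()])        # ['demo.zip/b/'] -- 'b/' was implied
    print((root / 'b' / 'd').is_dir())             # True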
@ -1,381 +0,0 @@
import io
import posixpath
import zipfile
import itertools
import contextlib
import pathlib
import re
import fnmatch

from .py310compat import text_encoding


__all__ = ['Path']


def _parents(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all parents of that path.

    >>> list(_parents('b/d'))
    ['b']
    >>> list(_parents('/b/d/'))
    ['/b']
    >>> list(_parents('b/d/f/'))
    ['b/d', 'b']
    >>> list(_parents('b'))
    []
    >>> list(_parents(''))
    []
    """
    return itertools.islice(_ancestry(path), 1, None)


def _ancestry(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all elements of that path

    >>> list(_ancestry('b/d'))
    ['b/d', 'b']
    >>> list(_ancestry('/b/d/'))
    ['/b/d', '/b']
    >>> list(_ancestry('b/d/f/'))
    ['b/d/f', 'b/d', 'b']
    >>> list(_ancestry('b'))
    ['b']
    >>> list(_ancestry(''))
    []
    """
    path = path.rstrip(posixpath.sep)
    while path and path != posixpath.sep:
        yield path
        path, tail = posixpath.split(path)


_dedupe = dict.fromkeys
"""Deduplicate an iterable in original order"""


def _difference(minuend, subtrahend):
    """
    Return items in minuend not in subtrahend, retaining order
    with O(1) lookup.
    """
    return itertools.filterfalse(set(subtrahend).__contains__, minuend)


class InitializedState:
    """
    Mix-in to save the initialization state for pickling.
    """

    def __init__(self, *args, **kwargs):
        self.__args = args
        self.__kwargs = kwargs
        super().__init__(*args, **kwargs)

    def __getstate__(self):
        return self.__args, self.__kwargs

    def __setstate__(self, state):
        args, kwargs = state
        super().__init__(*args, **kwargs)


class CompleteDirs(InitializedState, zipfile.ZipFile):
    """
    A ZipFile subclass that ensures that implied directories
    are always included in the namelist.
    """

    @staticmethod
    def _implied_dirs(names):
        parents = itertools.chain.from_iterable(map(_parents, names))
        as_dirs = (p + posixpath.sep for p in parents)
        return _dedupe(_difference(as_dirs, names))

    def namelist(self):
        names = super(CompleteDirs, self).namelist()
        return names + list(self._implied_dirs(names))

    def _name_set(self):
        return set(self.namelist())

    def resolve_dir(self, name):
        """
        If the name represents a directory, return that name
        as a directory (with the trailing slash).
        """
        names = self._name_set()
        dirname = name + '/'
        dir_match = name not in names and dirname in names
        return dirname if dir_match else name

    @classmethod
    def make(cls, source):
        """
        Given a source (filename or zipfile), return an
        appropriate CompleteDirs subclass.
        """
        if isinstance(source, CompleteDirs):
            return source

        if not isinstance(source, zipfile.ZipFile):
            return cls(source)

        # Only allow for FastLookup when supplied zipfile is read-only
        if 'r' not in source.mode:
            cls = CompleteDirs

        source.__class__ = cls
        return source


class FastLookup(CompleteDirs):
    """
    ZipFile subclass to ensure implicit
    dirs exist and are resolved rapidly.
    """

    def namelist(self):
        with contextlib.suppress(AttributeError):
            return self.__names
        self.__names = super(FastLookup, self).namelist()
        return self.__names

    def _name_set(self):
        with contextlib.suppress(AttributeError):
            return self.__lookup
        self.__lookup = super(FastLookup, self)._name_set()
        return self.__lookup


class Path:
    """
    A pathlib-compatible interface for zip files.

    Consider a zip file with this structure::

        .
        ├── a.txt
        └── b
            ├── c.txt
            └── d
                └── e.txt

    >>> data = io.BytesIO()
    >>> zf = zipfile.ZipFile(data, 'w')
    >>> zf.writestr('a.txt', 'content of a')
    >>> zf.writestr('b/c.txt', 'content of c')
    >>> zf.writestr('b/d/e.txt', 'content of e')
    >>> zf.filename = 'mem/abcde.zip'

    Path accepts the zipfile object itself or a filename

    >>> root = Path(zf)

    From there, several path operations are available.

    Directory iteration (including the zip file itself):

    >>> a, b = root.iterdir()
    >>> a
    Path('mem/abcde.zip', 'a.txt')
    >>> b
    Path('mem/abcde.zip', 'b/')

    name property:

    >>> b.name
    'b'

    join with divide operator:

    >>> c = b / 'c.txt'
    >>> c
    Path('mem/abcde.zip', 'b/c.txt')
    >>> c.name
    'c.txt'

    Read text:

    >>> c.read_text()
    'content of c'

    existence:

    >>> c.exists()
    True
    >>> (b / 'missing.txt').exists()
    False

    Coercion to string:

    >>> import os
    >>> str(c).replace(os.sep, posixpath.sep)
    'mem/abcde.zip/b/c.txt'

    At the root, ``name``, ``filename``, and ``parent``
    resolve to the zipfile. Note these attributes are not
    valid and will raise a ``ValueError`` if the zipfile
    has no filename.

    >>> root.name
    'abcde.zip'
    >>> str(root.filename).replace(os.sep, posixpath.sep)
    'mem/abcde.zip'
    >>> str(root.parent)
    'mem'
    """

    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"

    def __init__(self, root, at=""):
        """
        Construct a Path from a ZipFile or filename.

        Note: When the source is an existing ZipFile object,
        its type (__class__) will be mutated to a
        specialized type. If the caller wishes to retain the
        original type, the caller should either create a
        separate ZipFile object or pass a filename.
        """
        self.root = FastLookup.make(root)
        self.at = at

    def __eq__(self, other):
        """
        >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
        False
        """
        if self.__class__ is not other.__class__:
            return NotImplemented
        return (self.root, self.at) == (other.root, other.at)

    def __hash__(self):
        return hash((self.root, self.at))

    def open(self, mode='r', *args, pwd=None, **kwargs):
        """
        Open this entry as text or binary following the semantics
        of ``pathlib.Path.open()`` by passing arguments through
        to io.TextIOWrapper().
        """
        if self.is_dir():
            raise IsADirectoryError(self)
        zip_mode = mode[0]
        if not self.exists() and zip_mode == 'r':
            raise FileNotFoundError(self)
        stream = self.root.open(self.at, zip_mode, pwd=pwd)
        if 'b' in mode:
            if args or kwargs:
                raise ValueError("encoding args invalid for binary operation")
            return stream
        else:
            kwargs["encoding"] = text_encoding(kwargs.get("encoding"))
            return io.TextIOWrapper(stream, *args, **kwargs)

    @property
    def name(self):
        return pathlib.Path(self.at).name or self.filename.name

    @property
    def suffix(self):
        return pathlib.Path(self.at).suffix or self.filename.suffix

    @property
    def suffixes(self):
        return pathlib.Path(self.at).suffixes or self.filename.suffixes

    @property
    def stem(self):
        return pathlib.Path(self.at).stem or self.filename.stem

    @property
    def filename(self):
        return pathlib.Path(self.root.filename).joinpath(self.at)

    def read_text(self, *args, **kwargs):
        kwargs["encoding"] = text_encoding(kwargs.get("encoding"))
        with self.open('r', *args, **kwargs) as strm:
            return strm.read()

    def read_bytes(self):
        with self.open('rb') as strm:
            return strm.read()

    def _is_child(self, path):
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")

    def _next(self, at):
        return self.__class__(self.root, at)

    def is_dir(self):
        return not self.at or self.at.endswith("/")

    def is_file(self):
        return self.exists() and not self.is_dir()

    def exists(self):
        return self.at in self.root._name_set()

    def iterdir(self):
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        subs = map(self._next, self.root.namelist())
        return filter(self._is_child, subs)

    def match(self, path_pattern):
        return pathlib.Path(self.at).match(path_pattern)

    def is_symlink(self):
        """
        Return whether this path is a symlink. Always false (python/cpython#82102).
        """
        return False

    def _descendants(self):
        for child in self.iterdir():
            yield child
            if child.is_dir():
                yield from child._descendants()

    def glob(self, pattern):
        if not pattern:
            raise ValueError("Unacceptable pattern: {!r}".format(pattern))

        matches = re.compile(fnmatch.translate(pattern)).fullmatch
        return (
            child
            for child in self._descendants()
            if matches(str(child.relative_to(self)))
        )

    def rglob(self, pattern):
        return self.glob(f'**/{pattern}')

    def relative_to(self, other, *extra):
        return posixpath.relpath(str(self), str(other.joinpath(*extra)))

    def __str__(self):
        return posixpath.join(self.root.filename, self.at)

    def __repr__(self):
        return self.__repr.format(self=self)

    def joinpath(self, *other):
        next = posixpath.join(self.at, *other)
        return self._next(self.root.resolve_dir(next))

    __truediv__ = joinpath

    @property
    def parent(self):
        if not self.at:
            return self.filename.parent
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            parent_at += '/'
        return self._next(parent_at)
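CompleteDirs above solves the same implied-directory problem at the ZipFile layer, using dict.fromkeys for an order-preserving dedupe and itertools.filterfalse against a set for the fast difference. The idiom in isolation (a sketch with a simplified stand-in for _parents(), not an import from the module):

    import itertools
    import posixpath

    def parents(path):
        # simplified stand-in for _parents(); adequate for relative names
        path = posixpath.dirname(path.rstrip(posixpath.sep))
        while path:
            yield path
            path = posixpath.dirname(path)

    names = ['b/d/e.txt', 'b/c.txt', 'a.txt']
    all_parents = itertools.chain.from_iterable(map(parents, names))
    as_dirs = (p + posixpath.sep for p in all_parents)
    implied = dict.fromkeys(itertools.filterfalse(set(names).__contains__, as_dirs))
    print(list(implied))   # ['b/d/', 'b/']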
@ -1,12 +0,0 @@
import sys
import io


te_impl = 'lambda encoding, stacklevel=2, /: encoding'
te_impl_37 = te_impl.replace(', /', '')
_text_encoding = eval(te_impl) if sys.version_info > (3, 8) else eval(te_impl_37)


text_encoding = (
    io.text_encoding if sys.version_info > (3, 10) else _text_encoding  # type: ignore
)
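The eval() dance exists because the positional-only marker (/) in the lambda is a SyntaxError before Python 3.8, so the source is kept in a string and only compiled on interpreters that accept it; on 3.10 and later the real io.text_encoding() is used instead. A rough stand-in for what the shim provides (a sketch; the real io.text_encoding() can also emit EncodingWarning):

    import io
    import sys

    def text_encoding(encoding, stacklevel=2):
        if sys.version_info >= (3, 10):
            return io.text_encoding(encoding, stacklevel)
        return encoding  # older Pythons: pass the value through unchanged

    print(text_encoding('utf-8'))   # 'utf-8' everywhere
    print(text_encoding(None))      # None before 3.10; typically 'locale' on 3.10+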