mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-19 21:03:14 -07:00)

Updated stevedore to 2.0.1

parent f1624a586f
commit fb6011f88d

52 changed files with 581 additions and 1960 deletions
15 binary files not shown.
libs/common/dogpile/__init__.py (vendored)

@@ -1,4 +1,4 @@
-__version__ = "1.1.8"
+__version__ = '0.9.2'
 
 from .lock import Lock  # noqa
 from .lock import NeedRegenerationException  # noqa
442  libs/common/dogpile/cache/api.py (vendored)

@@ -1,19 +1,13 @@
-import abc
-import pickle
-from typing import Any
-from typing import Callable
-from typing import cast
-from typing import Mapping
-from typing import NamedTuple
-from typing import Optional
-from typing import Sequence
-from typing import Union
+import operator
+
+from ..util.compat import py3k
 
 
-class NoValue:
+class NoValue(object):
     """Describe a missing cache value.
 
-    The :data:`.NO_VALUE` constant should be used.
+    The :attr:`.NO_VALUE` module global
+    should be used.
 
     """
 
@@ -28,123 +22,51 @@ class NoValue:
         """
         return "<dogpile.cache.api.NoValue object>"
 
-    def __bool__(self):  # pragma NO COVERAGE
-        return False
+    if py3k:
+
+        def __bool__(self):  # pragma NO COVERAGE
+            return False
+
+    else:
+
+        def __nonzero__(self):  # pragma NO COVERAGE
+            return False
 
 
 NO_VALUE = NoValue()
 """Value returned from ``get()`` that describes
 a key not present."""
 
-MetaDataType = Mapping[str, Any]
-
-KeyType = str
-"""A cache key."""
-
-ValuePayload = Any
-"""An object to be placed in the cache against a key."""
-
-
-KeyManglerType = Callable[[KeyType], KeyType]
-Serializer = Callable[[ValuePayload], bytes]
-Deserializer = Callable[[bytes], ValuePayload]
-
-
-class CacheMutex(abc.ABC):
-    """Describes a mutexing object with acquire and release methods.
-
-    This is an abstract base class; any object that has acquire/release
-    methods may be used.
-
-    .. versionadded:: 1.1
-
-    .. seealso::
-
-        :meth:`.CacheBackend.get_mutex` - the backend method that optionally
-        returns this locking object.
-
-    """
-
-    @abc.abstractmethod
-    def acquire(self, wait: bool = True) -> bool:
-        """Acquire the mutex.
-
-        :param wait: if True, block until available, else return True/False
-         immediately.
-
-        :return: True if the lock succeeded.
-
-        """
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def release(self) -> None:
-        """Release the mutex."""
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def locked(self) -> bool:
-        """Check if the mutex was acquired.
-
-        :return: true if the lock is acquired.
-
-        .. versionadded:: 1.1.2
-
-        """
-        raise NotImplementedError()
-
-    @classmethod
-    def __subclasshook__(cls, C):
-        return hasattr(C, "acquire") and hasattr(C, "release")
-
-
-class CachedValue(NamedTuple):
+
+class CachedValue(tuple):
     """Represent a value stored in the cache.
 
     :class:`.CachedValue` is a two-tuple of
     ``(payload, metadata)``, where ``metadata``
     is dogpile.cache's tracking information (
-    currently the creation time).
+    currently the creation time). The metadata
+    and tuple structure is pickleable, if
+    the backend requires serialization.
 
     """
 
-    payload: ValuePayload
+    payload = property(operator.itemgetter(0))
+    """Named accessor for the payload."""
 
-    metadata: MetaDataType
+    metadata = property(operator.itemgetter(1))
+    """Named accessor for the dogpile.cache metadata dictionary."""
+
+    def __new__(cls, payload, metadata):
+        return tuple.__new__(cls, (payload, metadata))
+
+    def __reduce__(self):
+        return CachedValue, (self.payload, self.metadata)
 
 
-CacheReturnType = Union[CachedValue, NoValue]
-"""The non-serialized form of what may be returned from a backend
-get method.
-
-"""
-
-SerializedReturnType = Union[bytes, NoValue]
-"""the serialized form of what may be returned from a backend get method."""
-
-BackendFormatted = Union[CacheReturnType, SerializedReturnType]
-"""Describes the type returned from the :meth:`.CacheBackend.get` method."""
-
-BackendSetType = Union[CachedValue, bytes]
-"""Describes the value argument passed to the :meth:`.CacheBackend.set`
-method."""
-
-BackendArguments = Mapping[str, Any]
-
-
-class CacheBackend:
-    """Base class for backend implementations.
-
-    Backends which set and get Python object values should subclass this
-    backend. For backends in which the value that's stored is ultimately
-    a stream of bytes, the :class:`.BytesBackend` should be used.
-
-    """
-
-    key_mangler: Optional[Callable[[KeyType], KeyType]] = None
+class CacheBackend(object):
+    """Base class for backend implementations."""
+
+    key_mangler = None
     """Key mangling function.
 
     May be None, or otherwise declared
@@ -152,23 +74,7 @@ class CacheBackend:
 
     """
 
-    serializer: Union[None, Serializer] = None
-    """Serializer function that will be used by default if not overridden
-    by the region.
-
-    .. versionadded:: 1.1
-
-    """
-
-    deserializer: Union[None, Deserializer] = None
-    """deserializer function that will be used by default if not overridden
-    by the region.
-
-    .. versionadded:: 1.1
-
-    """
-
-    def __init__(self, arguments: BackendArguments):  # pragma NO COVERAGE
+    def __init__(self, arguments):  # pragma NO COVERAGE
         """Construct a new :class:`.CacheBackend`.
 
         Subclasses should override this to
@@ -191,10 +97,10 @@ class CacheBackend:
             )
         )
 
-    def has_lock_timeout(self) -> bool:
+    def has_lock_timeout(self):
         return False
 
-    def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
+    def get_mutex(self, key):
         """Return an optional mutexing object for the given key.
 
         This object need only provide an ``acquire()``
@@ -227,141 +133,48 @@ class CacheBackend:
         """
         return None
 
-    def get(self, key: KeyType) -> BackendFormatted:  # pragma NO COVERAGE
-        """Retrieve an optionally serialized value from the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :return: the Python object that corresponds to
-        what was established via the :meth:`.CacheBackend.set` method,
-        or the :data:`.NO_VALUE` constant if not present.
-
-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.get_serialized` method is not overridden.
+    def get(self, key):  # pragma NO COVERAGE
+        """Retrieve a value from the cache.
+
+        The returned value should be an instance of
+        :class:`.CachedValue`, or ``NO_VALUE`` if
+        not present.
 
         """
         raise NotImplementedError()
 
-    def get_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[BackendFormatted]:  # pragma NO COVERAGE
-        """Retrieve multiple optionally serialized values from the cache.
-
-        :param keys: sequence of string keys that was passed to the
-        :meth:`.CacheRegion.get_multi` method, which will also be processed
-        by the "key mangling" function if one was present.
-
-        :return a list of values as would be returned
-        individually via the :meth:`.CacheBackend.get` method, corresponding
+    def get_multi(self, keys):  # pragma NO COVERAGE
+        """Retrieve multiple values from the cache.
+
+        The returned value should be a list, corresponding
         to the list of keys given.
 
-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.get_serialized_multi` method is not overridden.
-
         .. versionadded:: 0.5.0
 
         """
         raise NotImplementedError()
 
-    def get_serialized(self, key: KeyType) -> SerializedReturnType:
-        """Retrieve a serialized value from the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :return: a bytes object, or :data:`.NO_VALUE`
-        constant if not present.
-
-        The default implementation of this method for :class:`.CacheBackend`
-        returns the value of the :meth:`.CacheBackend.get` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        return cast(SerializedReturnType, self.get(key))
-
-    def get_serialized_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
-        """Retrieve multiple serialized values from the cache.
-
-        :param keys: sequence of string keys that was passed to the
-        :meth:`.CacheRegion.get_multi` method, which will also be processed
-        by the "key mangling" function if one was present.
-
-        :return: list of bytes objects
-
-        The default implementation of this method for :class:`.CacheBackend`
-        returns the value of the :meth:`.CacheBackend.get_multi` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        return cast(Sequence[SerializedReturnType], self.get_multi(keys))
-
-    def set(
-        self, key: KeyType, value: BackendSetType
-    ) -> None:  # pragma NO COVERAGE
-        """Set an optionally serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :param value: The optionally serialized :class:`.CachedValue` object.
-        May be an instance of :class:`.CachedValue` or a bytes object
-        depending on if a serializer is in use with the region and if the
-        :meth:`.CacheBackend.set_serialized` method is not overridden.
-
-        .. seealso::
-
-            :meth:`.CacheBackend.set_serialized`
+    def set(self, key, value):  # pragma NO COVERAGE
+        """Set a value in the cache.
+
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
+        The value will always be an instance
+        of :class:`.CachedValue`.
 
         """
         raise NotImplementedError()
 
-    def set_serialized(
-        self, key: KeyType, value: bytes
-    ) -> None:  # pragma NO COVERAGE
-        """Set a serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :param value: a bytes object to be stored.
-
-        The default implementation of this method for :class:`.CacheBackend`
-        calls upon the :meth:`.CacheBackend.set` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        self.set(key, value)
-
-    def set_multi(
-        self, mapping: Mapping[KeyType, BackendSetType]
-    ) -> None:  # pragma NO COVERAGE
+    def set_multi(self, mapping):  # pragma NO COVERAGE
         """Set multiple values in the cache.
 
-        :param mapping: a dict in which the key will be whatever was passed to
-        the :meth:`.CacheRegion.set_multi` method, processed by the "key
-        mangling" function, if any.
+        ``mapping`` is a dict in which
+        the key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
+        The value will always be an instance
+        of :class:`.CachedValue`.
 
         When implementing a new :class:`.CacheBackend` or cutomizing via
         :class:`.ProxyBackend`, be aware that when this method is invoked by
@@ -371,52 +184,17 @@ class CacheBackend:
         -- that will have the undesirable effect of modifying the returned
         values as well.
 
-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.set_serialized_multi` method is not overridden.
-
-
         .. versionadded:: 0.5.0
 
         """
         raise NotImplementedError()
 
-    def set_serialized_multi(
-        self, mapping: Mapping[KeyType, bytes]
-    ) -> None:  # pragma NO COVERAGE
-        """Set multiple serialized values in the cache.
-
-        :param mapping: a dict in which the key will be whatever was passed to
-        the :meth:`.CacheRegion.set_multi` method, processed by the "key
-        mangling" function, if any.
-
-        When implementing a new :class:`.CacheBackend` or cutomizing via
-        :class:`.ProxyBackend`, be aware that when this method is invoked by
-        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
-        same ones returned to the upstream caller. If the subclass alters the
-        values in any way, it must not do so 'in-place' on the ``mapping`` dict
-        -- that will have the undesirable effect of modifying the returned
-        values as well.
-
-        .. versionadded:: 1.1
-
-        The default implementation of this method for :class:`.CacheBackend`
-        calls upon the :meth:`.CacheBackend.set_multi` method.
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        self.set_multi(mapping)
-
-    def delete(self, key: KeyType) -> None:  # pragma NO COVERAGE
+    def delete(self, key):  # pragma NO COVERAGE
         """Delete a value from the cache.
 
-        :param key: String key that was passed to the
-        :meth:`.CacheRegion.delete`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
 
         The behavior here should be idempotent,
         that is, can be called any number of times
@@ -425,14 +203,12 @@ class CacheBackend:
         """
         raise NotImplementedError()
 
-    def delete_multi(
-        self, keys: Sequence[KeyType]
-    ) -> None:  # pragma NO COVERAGE
+    def delete_multi(self, keys):  # pragma NO COVERAGE
         """Delete multiple values from the cache.
 
-        :param keys: sequence of string keys that was passed to the
-        :meth:`.CacheRegion.delete_multi` method, which will also be processed
-        by the "key mangling" function if one was present.
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
 
         The behavior here should be idempotent,
        that is, can be called any number of times
@@ -443,95 +219,3 @@ class CacheBackend:
 
         """
         raise NotImplementedError()
-
-
-class DefaultSerialization:
-    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
-        pickle.dumps
-    )
-    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
-        pickle.loads
-    )
-
-
-class BytesBackend(DefaultSerialization, CacheBackend):
-    """A cache backend that receives and returns series of bytes.
-
-    This backend only supports the "serialized" form of values; subclasses
-    should implement :meth:`.BytesBackend.get_serialized`,
-    :meth:`.BytesBackend.get_serialized_multi`,
-    :meth:`.BytesBackend.set_serialized`,
-    :meth:`.BytesBackend.set_serialized_multi`.
-
-    .. versionadded:: 1.1
-
-    """
-
-    def get_serialized(self, key: KeyType) -> SerializedReturnType:
-        """Retrieve a serialized value from the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :return: a bytes object, or :data:`.NO_VALUE`
-        constant if not present.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def get_serialized_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
-        """Retrieve multiple serialized values from the cache.
-
-        :param keys: sequence of string keys that was passed to the
-        :meth:`.CacheRegion.get_multi` method, which will also be processed
-        by the "key mangling" function if one was present.
-
-        :return: list of bytes objects
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def set_serialized(
-        self, key: KeyType, value: bytes
-    ) -> None:  # pragma NO COVERAGE
-        """Set a serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-        method, which will also be processed by the "key mangling" function
-        if one was present.
-
-        :param value: a bytes object to be stored.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def set_serialized_multi(
-        self, mapping: Mapping[KeyType, bytes]
-    ) -> None:  # pragma NO COVERAGE
-        """Set multiple serialized values in the cache.
-
-        :param mapping: a dict in which the key will be whatever was passed to
-        the :meth:`.CacheRegion.set_multi` method, processed by the "key
-        mangling" function, if any.
-
-        When implementing a new :class:`.CacheBackend` or cutomizing via
-        :class:`.ProxyBackend`, be aware that when this method is invoked by
-        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
-        same ones returned to the upstream caller. If the subclass alters the
-        values in any way, it must not do so 'in-place' on the ``mapping`` dict
-        -- that will have the undesirable effect of modifying the returned
-        values as well.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
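Note: the 0.9.2 API restored above has no serializer hooks; a backend's ``get()``/``set()`` exchange :class:`.CachedValue` objects directly rather than bytes. A minimal sketch of a backend conforming to that contract (the ``DictBackend`` name and its storage dict are illustrative, not part of this commit)::

    from dogpile.cache.api import CacheBackend, NO_VALUE

    class DictBackend(CacheBackend):
        def __init__(self, arguments):
            # arguments is the dict passed to region.configure()
            self._store = {}

        def get(self, key):
            # return the CachedValue stored by set(), or NO_VALUE
            return self._store.get(key, NO_VALUE)

        def set(self, key, value):
            # value is a CachedValue two-tuple of (payload, metadata)
            self._store[key] = value

        def delete(self, key):
            # idempotent: deleting an absent key is not an error
            self._store.pop(key, None)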
10  libs/common/dogpile/cache/backends/__init__.py (vendored)

@@ -24,11 +24,6 @@ register_backend(
     "dogpile.cache.backends.memcached",
     "MemcachedBackend",
 )
-register_backend(
-    "dogpile.cache.pymemcache",
-    "dogpile.cache.backends.memcached",
-    "PyMemcacheBackend",
-)
 register_backend(
     "dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend"
 )
@@ -40,8 +35,3 @@ register_backend(
 register_backend(
     "dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend"
 )
-register_backend(
-    "dogpile.cache.redis_sentinel",
-    "dogpile.cache.backends.redis",
-    "RedisSentinelBackend",
-)
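With the ``dogpile.cache.pymemcache`` and ``dogpile.cache.redis_sentinel`` registrations removed, only the stock backends remain resolvable by name. A third-party backend can still register itself the same way these do; ``myapp.dict`` and ``myapp.backends`` below are invented names used only to sketch the call::

    from dogpile.cache.region import register_backend

    # name used in configure(), module path, class name within that module
    register_backend("myapp.dict", "myapp.backends", "DictBackend")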
64  libs/common/dogpile/cache/backends/file.py (vendored)

@@ -9,18 +9,17 @@ Provides backends that deal with local filesystem access.
 from __future__ import with_statement
 
 from contextlib import contextmanager
-import dbm
 import os
-import threading
 
-from ..api import BytesBackend
+from ..api import CacheBackend
 from ..api import NO_VALUE
 from ... import util
+from ...util import compat
 
 __all__ = ["DBMBackend", "FileLock", "AbstractFileLock"]
 
 
-class DBMBackend(BytesBackend):
+class DBMBackend(CacheBackend):
     """A file-backend using a dbm file to store keys.
 
     Basic usage::
@@ -157,6 +156,12 @@ class DBMBackend(BytesBackend):
             util.KeyReentrantMutex.factory,
         )
 
+        # TODO: make this configurable
+        if compat.py3k:
+            import dbm
+        else:
+            import anydbm as dbm
+        self.dbmmodule = dbm
         self._init_dbm_file()
 
     def _init_lock(self, argument, suffix, basedir, basefile, wrapper=None):
@@ -180,7 +185,7 @@ class DBMBackend(BytesBackend):
                 exists = True
                 break
         if not exists:
-            fh = dbm.open(self.filename, "c")
+            fh = self.dbmmodule.open(self.filename, "c")
             fh.close()
 
     def get_mutex(self, key):
@@ -210,50 +215,57 @@ class DBMBackend(BytesBackend):
     @contextmanager
     def _dbm_file(self, write):
         with self._use_rw_lock(write):
-            with dbm.open(self.filename, "w" if write else "r") as dbm_obj:
-                yield dbm_obj
+            dbm = self.dbmmodule.open(self.filename, "w" if write else "r")
+            yield dbm
+            dbm.close()
 
-    def get_serialized(self, key):
-        with self._dbm_file(False) as dbm_obj:
-            if hasattr(dbm_obj, "get"):
-                value = dbm_obj.get(key, NO_VALUE)
+    def get(self, key):
+        with self._dbm_file(False) as dbm:
+            if hasattr(dbm, "get"):
+                value = dbm.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
-                    value = dbm_obj[key]
+                    value = dbm[key]
                 except KeyError:
                     value = NO_VALUE
+            if value is not NO_VALUE:
+                value = compat.pickle.loads(value)
             return value
 
-    def get_serialized_multi(self, keys):
-        return [self.get_serialized(key) for key in keys]
+    def get_multi(self, keys):
+        return [self.get(key) for key in keys]
 
-    def set_serialized(self, key, value):
-        with self._dbm_file(True) as dbm_obj:
-            dbm_obj[key] = value
+    def set(self, key, value):
+        with self._dbm_file(True) as dbm:
+            dbm[key] = compat.pickle.dumps(
+                value, compat.pickle.HIGHEST_PROTOCOL
+            )
 
-    def set_serialized_multi(self, mapping):
-        with self._dbm_file(True) as dbm_obj:
+    def set_multi(self, mapping):
+        with self._dbm_file(True) as dbm:
             for key, value in mapping.items():
-                dbm_obj[key] = value
+                dbm[key] = compat.pickle.dumps(
+                    value, compat.pickle.HIGHEST_PROTOCOL
+                )
 
     def delete(self, key):
-        with self._dbm_file(True) as dbm_obj:
+        with self._dbm_file(True) as dbm:
             try:
-                del dbm_obj[key]
+                del dbm[key]
             except KeyError:
                 pass
 
     def delete_multi(self, keys):
-        with self._dbm_file(True) as dbm_obj:
+        with self._dbm_file(True) as dbm:
             for key in keys:
                 try:
-                    del dbm_obj[key]
+                    del dbm[key]
                 except KeyError:
                     pass
 
 
-class AbstractFileLock:
+class AbstractFileLock(object):
     """Coordinate read/write access to a file.
 
     typically is a file-based lock but doesn't necessarily have to be.
@@ -385,7 +397,7 @@ class FileLock(AbstractFileLock):
     """
 
     def __init__(self, filename):
-        self._filedescriptor = threading.local()
+        self._filedescriptor = compat.threading.local()
         self.filename = filename
 
     @util.memoized_property
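The restored 0.9.2 DBM backend pickles values itself via ``compat.pickle.dumps``/``loads`` instead of relying on the 1.1-style :class:`.BytesBackend` serializer hooks, so usage from a region is unchanged. A typical configuration, following the pattern the backend's own docstring documents (the filename is illustrative)::

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.dbm",
        expiration_time=3600,
        arguments={"filename": "/path/to/cachefile.dbm"},
    )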
329  libs/common/dogpile/cache/backends/memcached.py (vendored)

@@ -7,42 +7,29 @@ Provides backends for talking to `memcached <http://memcached.org>`_.
 """
 
 import random
-import threading
 import time
-import typing
-from typing import Any
-from typing import Mapping
-import warnings
 
 from ..api import CacheBackend
 from ..api import NO_VALUE
 from ... import util
+from ...util import compat
 
-if typing.TYPE_CHECKING:
-    import bmemcached
-    import memcache
-    import pylibmc
-    import pymemcache
-else:
-    # delayed import
-    bmemcached = None  # noqa F811
-    memcache = None  # noqa F811
-    pylibmc = None  # noqa F811
-    pymemcache = None  # noqa F811
 
 __all__ = (
     "GenericMemcachedBackend",
     "MemcachedBackend",
     "PylibmcBackend",
-    "PyMemcacheBackend",
     "BMemcachedBackend",
     "MemcachedLock",
 )
 
 
 class MemcachedLock(object):
-    """Simple distributed lock using memcached."""
+    """Simple distributed lock using memcached.
+
+    This is an adaptation of the lock featured at
+    http://amix.dk/blog/post/19386
+
+    """
 
     def __init__(self, client_fn, key, timeout=0):
         self.client_fn = client_fn
@@ -58,15 +45,11 @@ class MemcachedLock(object):
             elif not wait:
                 return False
             else:
-                sleep_time = (((i + 1) * random.random()) + 2**i) / 2.5
+                sleep_time = (((i + 1) * random.random()) + 2 ** i) / 2.5
                 time.sleep(sleep_time)
             if i < 15:
                 i += 1
 
-    def locked(self):
-        client = self.client_fn()
-        return client.get(self.key) is not None
-
     def release(self):
         client = self.client_fn()
         client.delete(self.key)
@@ -124,17 +107,10 @@ class GenericMemcachedBackend(CacheBackend):
 
     """
 
-    set_arguments: Mapping[str, Any] = {}
+    set_arguments = {}
     """Additional arguments which will be passed
    to the :meth:`set` method."""
 
-    # No need to override serializer, as all the memcached libraries
-    # handles that themselves. Still, we support customizing the
-    # serializer/deserializer to use better default pickle protocol
-    # or completely different serialization mechanism
-    serializer = None
-    deserializer = None
-
     def __init__(self, arguments):
         self._imports()
         # using a plain threading.local here.  threading.local
@@ -162,7 +138,7 @@ class GenericMemcachedBackend(CacheBackend):
     def _clients(self):
         backend = self
 
-        class ClientPool(threading.local):
+        class ClientPool(compat.threading.local):
             def __init__(self):
                 self.memcached = backend._create_client()
 
@@ -197,17 +173,12 @@ class GenericMemcachedBackend(CacheBackend):
 
     def get_multi(self, keys):
         values = self.client.get_multi(keys)
-        return [
-            NO_VALUE if val is None else val
-            for val in [values.get(key, NO_VALUE) for key in keys]
-        ]
+        return [NO_VALUE if key not in values else values[key] for key in keys]
 
     def set(self, key, value):
         self.client.set(key, value, **self.set_arguments)
 
     def set_multi(self, mapping):
-        mapping = {key: value for key, value in mapping.items()}
         self.client.set_multi(mapping, **self.set_arguments)
 
     def delete(self, key):
@@ -217,9 +188,10 @@ class GenericMemcachedBackend(CacheBackend):
         self.client.delete_multi(keys)
 
 
-class MemcacheArgs(GenericMemcachedBackend):
+class MemcacheArgs(object):
     """Mixin which provides support for the 'time' argument to set(),
     'min_compress_len' to other methods.
 
     """
 
     def __init__(self, arguments):
@@ -235,6 +207,9 @@ class MemcacheArgs(GenericMemcachedBackend):
         super(MemcacheArgs, self).__init__(arguments)
 
 
+pylibmc = None
+
+
 class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend for the
     `pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_
@@ -284,6 +259,9 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
     )
 
 
+memcache = None
+
+
 class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend using the standard
     `Python-memcached <http://www.tummy.com/Community/software/\
@@ -302,39 +280,17 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
         }
     )
 
-    :param dead_retry: Number of seconds memcached server is considered dead
-     before it is tried again. Will be passed to ``memcache.Client``
-     as the ``dead_retry`` parameter.
-
-    .. versionchanged:: 1.1.8 Moved the ``dead_retry`` argument which was
-       erroneously added to "set_parameters" to
-       be part of the Memcached connection arguments.
-
-    :param socket_timeout: Timeout in seconds for every call to a server.
-     Will be passed to ``memcache.Client`` as the ``socket_timeout``
-     parameter.
-
-    .. versionchanged:: 1.1.8 Moved the ``socket_timeout`` argument which
-       was erroneously added to "set_parameters"
-       to be part of the Memcached connection arguments.
-
     """
 
-    def __init__(self, arguments):
-        self.dead_retry = arguments.get("dead_retry", 30)
-        self.socket_timeout = arguments.get("socket_timeout", 3)
-        super(MemcachedBackend, self).__init__(arguments)
-
     def _imports(self):
         global memcache
         import memcache  # noqa
 
     def _create_client(self):
-        return memcache.Client(
-            self.url,
-            dead_retry=self.dead_retry,
-            socket_timeout=self.socket_timeout,
-        )
+        return memcache.Client(self.url)
+
+
+bmemcached = None
 
 
 class BMemcachedBackend(GenericMemcachedBackend):
@@ -343,11 +299,9 @@ class BMemcachedBackend(GenericMemcachedBackend):
     python-binary-memcached>`_
     memcached client.
 
-    This is a pure Python memcached client which includes
-    security features like SASL and SSL/TLS.
-    SASL is a standard for adding authentication mechanisms
-    to protocols in a way that is protocol independent.
+    This is a pure Python memcached client which
+    includes the ability to authenticate with a memcached
+    server using SASL.
 
     A typical configuration using username/password::
 
@@ -363,25 +317,6 @@ class BMemcachedBackend(GenericMemcachedBackend):
             }
         )
 
-    A typical configuration using tls_context::
-
-        import ssl
-        from dogpile.cache import make_region
-
-        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
-
-        region = make_region().configure(
-            'dogpile.cache.bmemcached',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'tls_context':ctx,
-            }
-        )
-
-    For advanced ways to configure TLS creating a more complex
-    tls_context visit https://docs.python.org/3/library/ssl.html
-
     Arguments which can be passed to the ``arguments``
     dictionary include:
 
@@ -389,17 +324,12 @@ class BMemcachedBackend(GenericMemcachedBackend):
      SASL authentication.
     :param password: optional password, will be used for
     SASL authentication.
-    :param tls_context: optional TLS context, will be used for
-     TLS connections.
-
-     .. versionadded:: 1.0.2
-
     """
 
     def __init__(self, arguments):
         self.username = arguments.get("username", None)
         self.password = arguments.get("password", None)
-        self.tls_context = arguments.get("tls_context", None)
         super(BMemcachedBackend, self).__init__(arguments)
 
     def _imports(self):
@@ -425,215 +355,10 @@ class BMemcachedBackend(GenericMemcachedBackend):
 
     def _create_client(self):
         return self.Client(
-            self.url,
-            username=self.username,
-            password=self.password,
-            tls_context=self.tls_context,
+            self.url, username=self.username, password=self.password
         )
 
     def delete_multi(self, keys):
         """python-binary-memcached api does not implements delete_multi"""
         for key in keys:
             self.delete(key)
-
-
-class PyMemcacheBackend(GenericMemcachedBackend):
-    """A backend for the
-    `pymemcache <https://github.com/pinterest/pymemcache>`_
-    memcached client.
-
-    A comprehensive, fast, pure Python memcached client
-
-    .. versionadded:: 1.1.2
-
-    pymemcache supports the following features:
-
-    * Complete implementation of the memcached text protocol.
-    * Configurable timeouts for socket connect and send/recv calls.
-    * Access to the "noreply" flag, which can significantly increase
-      the speed of writes.
-    * Flexible, simple approach to serialization and deserialization.
-    * The (optional) ability to treat network and memcached errors as
-      cache misses.
-
-    dogpile.cache uses the ``HashClient`` from pymemcache in order to reduce
-    API differences when compared to other memcached client drivers.
-    This allows the user to provide a single server or a list of memcached
-    servers.
-
-    Arguments which can be passed to the ``arguments``
-    dictionary include:
-
-    :param tls_context: optional TLS context, will be used for
-     TLS connections.
-
-     A typical configuration using tls_context::
-
-        import ssl
-        from dogpile.cache import make_region
-
-        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
-
-        region = make_region().configure(
-            'dogpile.cache.pymemcache',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'tls_context':ctx,
-            }
-        )
-
-    .. seealso::
-
-        `<https://docs.python.org/3/library/ssl.html>`_ - additional TLS
-        documentation.
-
-    :param serde: optional "serde". Defaults to
-     ``pymemcache.serde.pickle_serde``.
-
-    :param default_noreply: defaults to False.  When set to True this flag
-     enables the pymemcache "noreply" feature.  See the pymemcache
-     documentation for further details.
-
-    :param socket_keepalive: optional socket keepalive, will be used for
-     TCP keepalive configuration.  Use of this parameter requires pymemcache
-     3.5.0 or greater.  This parameter
-     accepts a
-     `pymemcache.client.base.KeepAliveOpts
-     <https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html#pymemcache.client.base.KeepaliveOpts>`_
-     object.
-
-     A typical configuration using ``socket_keepalive``::
-
-        from pymemcache import KeepaliveOpts
-        from dogpile.cache import make_region
-
-        # Using the default keepalive configuration
-        socket_keepalive = KeepaliveOpts()
-
-        region = make_region().configure(
-            'dogpile.cache.pymemcache',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'socket_keepalive': socket_keepalive
-            }
-        )
-
-     .. versionadded:: 1.1.4 - added support for ``socket_keepalive``.
-
-    :param enable_retry_client: optional flag to enable retry client
-     mechanisms to handle failure.  Defaults to False.  When set to ``True``,
-     the :paramref:`.PyMemcacheBackend.retry_attempts` parameter must also
-     be set, along with optional parameters
-     :paramref:`.PyMemcacheBackend.retry_delay`.
-     :paramref:`.PyMemcacheBackend.retry_for`,
-     :paramref:`.PyMemcacheBackend.do_not_retry_for`.
-
-     .. seealso::
-
-        `<https://pymemcache.readthedocs.io/en/latest/getting_started.html#using-the-built-in-retrying-mechanism>`_ -
-        in the pymemcache documentation
-
-     .. versionadded:: 1.1.4
-
-    :param retry_attempts: how many times to attempt an action with
-     pymemcache's retrying wrapper before failing. Must be 1 or above.
-     Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param retry_delay: optional int|float, how many seconds to sleep between
-     each attempt. Used by the retry wrapper. Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param retry_for: optional None|tuple|set|list, what exceptions to
-     allow retries for. Will allow retries for all exceptions if None.
-     Example: ``(MemcacheClientError, MemcacheUnexpectedCloseError)``
-     Accepts any class that is a subclass of Exception.  Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param do_not_retry_for: optional None|tuple|set|list, what
-     exceptions should be retried. Will not block retries for any Exception if
-     None. Example: ``(IOError, MemcacheIllegalInputError)``
-     Accepts any class that is a subclass of Exception.  Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param hashclient_retry_attempts: Amount of times a client should be tried
-     before it is marked dead and removed from the pool in the HashClient's
-     internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    :param hashclient_retry_timeout: Time in seconds that should pass between
-     retry attempts in the HashClient's internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    :param dead_timeout: Time in seconds before attempting to add a node
-     back in the pool in the HashClient's internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    """  # noqa E501
-
-    def __init__(self, arguments):
-        super().__init__(arguments)
-
-        self.serde = arguments.get("serde", pymemcache.serde.pickle_serde)
-        self.default_noreply = arguments.get("default_noreply", False)
-        self.tls_context = arguments.get("tls_context", None)
-        self.socket_keepalive = arguments.get("socket_keepalive", None)
-        self.enable_retry_client = arguments.get("enable_retry_client", False)
-        self.retry_attempts = arguments.get("retry_attempts", None)
-        self.retry_delay = arguments.get("retry_delay", None)
-        self.retry_for = arguments.get("retry_for", None)
-        self.do_not_retry_for = arguments.get("do_not_retry_for", None)
-        self.hashclient_retry_attempts = arguments.get(
-            "hashclient_retry_attempts", 2
-        )
-        self.hashclient_retry_timeout = arguments.get(
-            "hashclient_retry_timeout", 1
-        )
-        self.dead_timeout = arguments.get("hashclient_dead_timeout", 60)
-        if (
-            self.retry_delay is not None
-            or self.retry_attempts is not None
-            or self.retry_for is not None
-            or self.do_not_retry_for is not None
-        ) and not self.enable_retry_client:
-            warnings.warn(
-                "enable_retry_client is not set; retry options "
-                "will be ignored"
-            )
-
-    def _imports(self):
-        global pymemcache
-        import pymemcache
-
-    def _create_client(self):
-        _kwargs = {
-            "serde": self.serde,
-            "default_noreply": self.default_noreply,
-            "tls_context": self.tls_context,
-            "retry_attempts": self.hashclient_retry_attempts,
-            "retry_timeout": self.hashclient_retry_timeout,
-            "dead_timeout": self.dead_timeout,
-        }
-        if self.socket_keepalive is not None:
-            _kwargs.update({"socket_keepalive": self.socket_keepalive})
-
-        client = pymemcache.client.hash.HashClient(self.url, **_kwargs)
-        if self.enable_retry_client:
-            return pymemcache.client.retrying.RetryingClient(
-                client,
-                attempts=self.retry_attempts,
-                retry_delay=self.retry_delay,
-                retry_for=self.retry_for,
-                do_not_retry_for=self.do_not_retry_for,
-            )
-
-        return client
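With the ``dead_retry`` and ``socket_timeout`` constructor arguments removed, the restored ``MemcachedBackend`` again passes only the server URL to ``memcache.Client``. A typical configuration against this version (the host/port value is illustrative)::

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.memcached",
        expiration_time=3600,
        arguments={"url": "127.0.0.1:11211"},
    )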
27  libs/common/dogpile/cache/backends/memory.py (vendored)

@@ -10,10 +10,9 @@ places the value as given into the dictionary.
 
 """
 
 from ..api import CacheBackend
-from ..api import DefaultSerialization
 from ..api import NO_VALUE
+from ...util.compat import pickle
 
 
 class MemoryBackend(CacheBackend):
@@ -50,20 +49,36 @@ class MemoryBackend(CacheBackend):
 
     """
 
+    pickle_values = False
+
     def __init__(self, arguments):
         self._cache = arguments.pop("cache_dict", {})
 
     def get(self, key):
-        return self._cache.get(key, NO_VALUE)
+        value = self._cache.get(key, NO_VALUE)
+        if value is not NO_VALUE and self.pickle_values:
+            value = pickle.loads(value)
+        return value
 
     def get_multi(self, keys):
-        return [self._cache.get(key, NO_VALUE) for key in keys]
+        ret = [self._cache.get(key, NO_VALUE) for key in keys]
+        if self.pickle_values:
+            ret = [
+                pickle.loads(value) if value is not NO_VALUE else value
+                for value in ret
+            ]
+        return ret
 
     def set(self, key, value):
+        if self.pickle_values:
+            value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
         self._cache[key] = value
 
     def set_multi(self, mapping):
+        pickle_values = self.pickle_values
         for key, value in mapping.items():
+            if pickle_values:
+                value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
             self._cache[key] = value
 
     def delete(self, key):
@@ -74,7 +89,7 @@ class MemoryBackend(CacheBackend):
         self._cache.pop(key, None)
 
 
-class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
+class MemoryPickleBackend(MemoryBackend):
     """A backend that uses a plain dictionary, but serializes objects on
     :meth:`.MemoryBackend.set` and deserializes :meth:`.MemoryBackend.get`.
 
@@ -105,3 +120,5 @@ class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
     .. versionadded:: 0.5.3
 
     """
+
+    pickle_values = True
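The restored ``pickle_values`` flag is what separates the two memory backends in this version: ``MemoryPickleBackend`` simply sets it to ``True``, so stored objects are copied through pickle rather than shared by reference. A small sketch of the difference this makes (the key and data are illustrative; the comment shows the expected result)::

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory_pickle")

    data = {"a": 1}
    region.set("k", data)
    data["a"] = 2
    print(region.get("k"))  # {'a': 1} -- the cached copy is isolated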
3  libs/common/dogpile/cache/backends/null.py (vendored)

@@ -24,9 +24,6 @@ class NullLock(object):
     def release(self):
         pass
 
-    def locked(self):
-        return False
-
 
 class NullBackend(CacheBackend):
     """A "null" backend that effectively disables all cache operations.
243
libs/common/dogpile/cache/backends/redis.py
vendored
243
libs/common/dogpile/cache/backends/redis.py
vendored
|
@ -8,23 +8,20 @@ Provides backends for talking to `Redis <http://redis.io>`_.
|
||||||
|
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import typing
|
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
from ..api import BytesBackend
|
from ..api import CacheBackend
|
||||||
from ..api import NO_VALUE
|
from ..api import NO_VALUE
|
||||||
|
from ...util.compat import pickle
|
||||||
|
from ...util.compat import u
|
||||||
|
|
||||||
if typing.TYPE_CHECKING:
|
redis = None
|
||||||
import redis
|
|
||||||
else:
|
|
||||||
# delayed import
|
|
||||||
redis = None # noqa F811
|
|
||||||
|
|
||||||
__all__ = ("RedisBackend", "RedisSentinelBackend")
|
__all__ = ("RedisBackend",)
|
||||||
|
|
||||||
|
|
||||||
-class RedisBackend(BytesBackend):
-    r"""A `Redis <http://redis.io/>`_ backend, using the
+class RedisBackend(CacheBackend):
+    """A `Redis <http://redis.io/>`_ backend, using the
     `redis-py <http://pypi.python.org/pypi/redis/>`_ backend.

     Example configuration::
@@ -49,10 +46,14 @@ class RedisBackend(BytesBackend):
     :param url: string. If provided, will override separate host/port/db
      params.  The format is that accepted by ``StrictRedis.from_url()``.

+     .. versionadded:: 0.4.1
+
     :param host: string, default is ``localhost``.

     :param password: string, default is no password.

+     .. versionadded:: 0.4.1
+
     :param port: integer, default is ``6379``.

     :param db: integer, default is ``0``.
@@ -70,31 +71,33 @@ class RedisBackend(BytesBackend):
     Redis should expire it. This argument is only valid when
     ``distributed_lock`` is ``True``.

+     .. versionadded:: 0.5.0
+
     :param socket_timeout: float, seconds for socket timeout.
      Default is None (no timeout).

+     .. versionadded:: 0.5.4
+
     :param lock_sleep: integer, number of seconds to sleep when failed to
      acquire a lock.  This argument is only valid when
      ``distributed_lock`` is ``True``.

+     .. versionadded:: 0.5.0
+
     :param connection_pool: ``redis.ConnectionPool`` object.  If provided,
      this object supersedes other connection arguments passed to the
      ``redis.StrictRedis`` instance, including url and/or host as well as
      socket_timeout, and will be passed to ``redis.StrictRedis`` as the
      source of connectivity.

+     .. versionadded:: 0.5.4
+
     :param thread_local_lock: bool, whether a thread-local Redis lock object
      should be used. This is the default, but is not compatible with
      asynchronous runners, as they run in a different thread than the one
      used to create the lock.

-    :param connection_kwargs: dict, additional keyword arguments are passed
-     along to the
-     ``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
-     directly, including parameters like ``ssl``, ``ssl_certfile``,
-     ``charset``, etc.
-
-    .. versionadded:: 1.1.6 Added ``connection_kwargs`` parameter.
+     .. versionadded:: 0.9.1

     """
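For context, the parameters documented in the docstring above are consumed like so — a minimal sketch, assuming a Redis server on localhost and using only arguments documented in this diff:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=3600,  # dogpile-level expiration, in seconds
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'redis_expiration_time': 3600 * 2,  # Redis TTL; keep it larger
            'distributed_lock': True,
        },
    )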
@@ -106,12 +109,12 @@ class RedisBackend(BytesBackend):
         self.password = arguments.pop("password", None)
         self.port = arguments.pop("port", 6379)
         self.db = arguments.pop("db", 0)
-        self.distributed_lock = arguments.pop("distributed_lock", False)
+        self.distributed_lock = arguments.get("distributed_lock", False)
         self.socket_timeout = arguments.pop("socket_timeout", None)
-        self.lock_timeout = arguments.pop("lock_timeout", None)
-        self.lock_sleep = arguments.pop("lock_sleep", 0.1)
-        self.thread_local_lock = arguments.pop("thread_local_lock", True)
-        self.connection_kwargs = arguments.pop("connection_kwargs", {})
+        self.lock_timeout = arguments.get("lock_timeout", None)
+        self.lock_sleep = arguments.get("lock_sleep", 0.1)
+        self.thread_local_lock = arguments.get("thread_local_lock", True)

         if self.distributed_lock and self.thread_local_lock:
             warnings.warn(
@@ -120,8 +123,8 @@ class RedisBackend(BytesBackend):
             )

         self.redis_expiration_time = arguments.pop("redis_expiration_time", 0)
-        self.connection_pool = arguments.pop("connection_pool", None)
-        self._create_client()
+        self.connection_pool = arguments.get("connection_pool", None)
+        self.client = self._create_client()

     def _imports(self):
         # defer imports until backend is used
@@ -133,20 +136,15 @@ class RedisBackend(BytesBackend):
             # the connection pool already has all other connection
             # options present within, so here we disregard socket_timeout
             # and others.
-            self.writer_client = redis.StrictRedis(
-                connection_pool=self.connection_pool
-            )
-            self.reader_client = self.writer_client
-        else:
+            return redis.StrictRedis(connection_pool=self.connection_pool)
+
         args = {}
-        args.update(self.connection_kwargs)
         if self.socket_timeout:
             args["socket_timeout"] = self.socket_timeout

         if self.url is not None:
             args.update(url=self.url)
-            self.writer_client = redis.StrictRedis.from_url(**args)
-            self.reader_client = self.writer_client
+            return redis.StrictRedis.from_url(**args)
         else:
             args.update(
                 host=self.host,
@@ -154,186 +152,57 @@ class RedisBackend(BytesBackend):
                 port=self.port,
                 db=self.db,
             )
-            self.writer_client = redis.StrictRedis(**args)
-            self.reader_client = self.writer_client
+            return redis.StrictRedis(**args)
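Both branches of ``_create_client()`` reduce to stock redis-py constructors; a standalone sketch of the two paths, assuming a local server (illustrative, not part of the diff):

    import redis

    # host/port/db path, taken when no ``url`` argument is given:
    client = redis.StrictRedis(host='localhost', port=6379, db=0)

    # URL path, taken when ``url`` is supplied:
    client_from_url = redis.StrictRedis.from_url('redis://localhost:6379/0')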
     def get_mutex(self, key):
         if self.distributed_lock:
-            return _RedisLockWrapper(
-                self.writer_client.lock(
-                    "_lock{0}".format(key),
+            return self.client.lock(
+                u("_lock{0}").format(key),
                 timeout=self.lock_timeout,
                 sleep=self.lock_sleep,
                 thread_local=self.thread_local_lock,
             )
-            )
         else:
             return None
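With ``distributed_lock`` enabled, the mutex returned above is a plain redis-py lock in both versions; roughly (a sketch, reusing the hypothetical ``client`` from the previous example):

    lock = client.lock('_lockmy_key', timeout=None, sleep=0.1,
                       thread_local=True)
    if lock.acquire():
        try:
            pass  # regenerate the cached value while holding the lock
        finally:
            lock.release()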
-    def get_serialized(self, key):
-        value = self.reader_client.get(key)
+    def get(self, key):
+        value = self.client.get(key)
         if value is None:
             return NO_VALUE
-        return value
+        return pickle.loads(value)

-    def get_serialized_multi(self, keys):
+    def get_multi(self, keys):
         if not keys:
             return []
-        values = self.reader_client.mget(keys)
-        return [v if v is not None else NO_VALUE for v in values]
+        values = self.client.mget(keys)
+        return [pickle.loads(v) if v is not None else NO_VALUE for v in values]

-    def set_serialized(self, key, value):
+    def set(self, key, value):
         if self.redis_expiration_time:
-            self.writer_client.setex(key, self.redis_expiration_time, value)
+            self.client.setex(
+                key,
+                self.redis_expiration_time,
+                pickle.dumps(value, pickle.HIGHEST_PROTOCOL),
+            )
         else:
-            self.writer_client.set(key, value)
+            self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

-    def set_serialized_multi(self, mapping):
+    def set_multi(self, mapping):
+        mapping = dict(
+            (k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL))
+            for k, v in mapping.items()
+        )
+
         if not self.redis_expiration_time:
-            self.writer_client.mset(mapping)
+            self.client.mset(mapping)
         else:
-            pipe = self.writer_client.pipeline()
+            pipe = self.client.pipeline()
             for key, value in mapping.items():
                 pipe.setex(key, self.redis_expiration_time, value)
             pipe.execute()

     def delete(self, key):
-        self.writer_client.delete(key)
+        self.client.delete(key)

     def delete_multi(self, keys):
-        self.writer_client.delete(*keys)
+        self.client.delete(*keys)
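The net effect of the get/set changes: the 0.9.2 backend pickles values itself instead of receiving pre-serialized bytes from the region. A self-contained round-trip showing what actually goes over the wire:

    import pickle

    payload = {'answer': 42}
    raw = pickle.dumps(payload, pickle.HIGHEST_PROTOCOL)  # stored by set()
    assert pickle.loads(raw) == payload                   # returned by get()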
-
-class _RedisLockWrapper:
-    __slots__ = ("mutex", "__weakref__")
-
-    def __init__(self, mutex: typing.Any):
-        self.mutex = mutex
-
-    def acquire(self, wait: bool = True) -> typing.Any:
-        return self.mutex.acquire(blocking=wait)
-
-    def release(self) -> typing.Any:
-        return self.mutex.release()
-
-    def locked(self) -> bool:
-        return self.mutex.locked()  # type: ignore
-
-
-class RedisSentinelBackend(RedisBackend):
-    """A `Redis <http://redis.io/>`_ backend, using the
-    `redis-py <http://pypi.python.org/pypi/redis/>`_ backend.
-    It will use the Sentinel of a Redis cluster.
-
-    .. versionadded:: 1.0.0
-
-    Example configuration::
-
-        from dogpile.cache import make_region
-
-        region = make_region().configure(
-            'dogpile.cache.redis_sentinel',
-            arguments = {
-                'sentinels': [
-                    ['redis_sentinel_1', 26379],
-                    ['redis_sentinel_2', 26379]
-                ],
-                'db': 0,
-                'redis_expiration_time': 60*60*2,   # 2 hours
-                'distributed_lock': True,
-                'thread_local_lock': False
-            }
-        )
-
-
-    Arguments accepted in the arguments dictionary:
-
-    :param db: integer, default is ``0``.
-
-    :param redis_expiration_time: integer, number of seconds after setting
-     a value that Redis should expire it. This should be larger than dogpile's
-     cache expiration. By default no expiration is set.
-
-    :param distributed_lock: boolean, when True, will use a
-     redis-lock as the dogpile lock. Use this when multiple processes will be
-     talking to the same redis instance. When False, dogpile will
-     coordinate on a regular threading mutex, Default is True.
-
-    :param lock_timeout: integer, number of seconds after acquiring a lock that
-     Redis should expire it. This argument is only valid when
-     ``distributed_lock`` is ``True``.
-
-    :param socket_timeout: float, seconds for socket timeout.
-     Default is None (no timeout).
-
-    :param sentinels: is a list of sentinel nodes. Each node is represented by
-     a pair (hostname, port).
-     Default is None (not in sentinel mode).
-
-    :param service_name: str, the service name.
-     Default is 'mymaster'.
-
-    :param sentinel_kwargs: is a dictionary of connection arguments used when
-     connecting to sentinel instances. Any argument that can be passed to
-     a normal Redis connection can be specified here.
-     Default is {}.
-
-    :param connection_kwargs: dict, additional keyword arguments are passed
-     along to the
-     ``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
-     directly, including parameters like ``ssl``, ``ssl_certfile``,
-     ``charset``, etc.
-
-    :param lock_sleep: integer, number of seconds to sleep when failed to
-     acquire a lock. This argument is only valid when
-     ``distributed_lock`` is ``True``.
-
-    :param thread_local_lock: bool, whether a thread-local Redis lock object
-     should be used. This is the default, but is not compatible with
-     asynchronous runners, as they run in a different thread than the one
-     used to create the lock.
-
-    """
-
-    def __init__(self, arguments):
-        arguments = arguments.copy()
-
-        self.sentinels = arguments.pop("sentinels", None)
-        self.service_name = arguments.pop("service_name", "mymaster")
-        self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {})
-
-        super().__init__(
-            arguments={
-                "distributed_lock": True,
-                "thread_local_lock": False,
-                **arguments,
-            }
-        )
-
-    def _imports(self):
-        # defer imports until backend is used
-        global redis
-        import redis.sentinel  # noqa
-
-    def _create_client(self):
-        sentinel_kwargs = {}
-        sentinel_kwargs.update(self.sentinel_kwargs)
-        sentinel_kwargs.setdefault("password", self.password)
-
-        connection_kwargs = {}
-        connection_kwargs.update(self.connection_kwargs)
-        connection_kwargs.setdefault("password", self.password)
-
-        if self.db is not None:
-            connection_kwargs.setdefault("db", self.db)
-            sentinel_kwargs.setdefault("db", self.db)
-        if self.socket_timeout is not None:
-            connection_kwargs.setdefault("socket_timeout", self.socket_timeout)
-
-        sentinel = redis.sentinel.Sentinel(
-            self.sentinels,
-            sentinel_kwargs=sentinel_kwargs,
-            **connection_kwargs,
-        )
-        self.writer_client = sentinel.master_for(self.service_name)
-        self.reader_client = sentinel.slave_for(self.service_name)
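The removed ``RedisSentinelBackend._create_client()`` was a thin wrapper over the stock redis-py sentinel API; a sketch with hypothetical sentinel hosts (``master_for()`` and ``slave_for()`` are real redis-py calls):

    import redis.sentinel

    sentinel = redis.sentinel.Sentinel(
        [('redis_sentinel_1', 26379), ('redis_sentinel_2', 26379)],
        sentinel_kwargs={'db': 0},
    )
    writer = sentinel.master_for('mymaster')  # read-write client
    reader = sentinel.slave_for('mymaster')   # read-only client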
45  libs/common/dogpile/cache/proxy.py  vendored
@@ -10,16 +10,7 @@ base backend.

 """

-from typing import Mapping
-from typing import Optional
-from typing import Sequence
-
-from .api import BackendFormatted
-from .api import BackendSetType
 from .api import CacheBackend
-from .api import CacheMutex
-from .api import KeyType
-from .api import SerializedReturnType


 class ProxyBackend(CacheBackend):
@@ -64,11 +55,11 @@ class ProxyBackend(CacheBackend):

     """

-    def __init__(self, *arg, **kw):
-        pass
+    def __init__(self, *args, **kwargs):
+        self.proxied = None

-    def wrap(self, backend: CacheBackend) -> "ProxyBackend":
-        """Take a backend as an argument and setup the self.proxied property.
+    def wrap(self, backend):
+        """ Take a backend as an argument and setup the self.proxied property.
         Return an object that be used as a backend by a :class:`.CacheRegion`
         object.
         """
@@ -82,37 +73,23 @@ class ProxyBackend(CacheBackend):
     # Delegate any functions that are not already overridden to
     # the proxies backend
     #
-    def get(self, key: KeyType) -> BackendFormatted:
+    def get(self, key):
         return self.proxied.get(key)

-    def set(self, key: KeyType, value: BackendSetType) -> None:
+    def set(self, key, value):
         self.proxied.set(key, value)

-    def delete(self, key: KeyType) -> None:
+    def delete(self, key):
         self.proxied.delete(key)

-    def get_multi(self, keys: Sequence[KeyType]) -> Sequence[BackendFormatted]:
+    def get_multi(self, keys):
         return self.proxied.get_multi(keys)

-    def set_multi(self, mapping: Mapping[KeyType, BackendSetType]) -> None:
+    def set_multi(self, mapping):
         self.proxied.set_multi(mapping)

-    def delete_multi(self, keys: Sequence[KeyType]) -> None:
+    def delete_multi(self, keys):
         self.proxied.delete_multi(keys)

-    def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
+    def get_mutex(self, key):
         return self.proxied.get_mutex(key)
-
-    def get_serialized(self, key: KeyType) -> SerializedReturnType:
-        return self.proxied.get_serialized(key)
-
-    def get_serialized_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[SerializedReturnType]:
-        return self.proxied.get_serialized_multi(keys)
-
-    def set_serialized(self, key: KeyType, value: bytes) -> None:
-        self.proxied.set_serialized(key, value)
-
-    def set_serialized_multi(self, mapping: Mapping[KeyType, bytes]) -> None:
-        self.proxied.set_serialized_multi(mapping)
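In both versions ``ProxyBackend`` is used the same way: override only the operations of interest and let everything else fall through to ``self.proxied``. A minimal sketch (``LoggingProxy`` is hypothetical):

    from dogpile.cache.proxy import ProxyBackend

    class LoggingProxy(ProxyBackend):
        def set(self, key, value):
            print('caching %s' % key)
            self.proxied.set(key, value)  # delegate to the wrapped backend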
542  libs/common/dogpile/cache/region.py  vendored
@@ -4,35 +4,16 @@ import contextlib
 import datetime
 from functools import partial
 from functools import wraps
-import json
 import logging
 from numbers import Number
 import threading
 import time
-from typing import Any
-from typing import Callable
-from typing import cast
-from typing import Mapping
-from typing import Optional
-from typing import Sequence
-from typing import Tuple
-from typing import Type
-from typing import Union

 from decorator import decorate

 from . import exception
-from .api import BackendArguments
-from .api import BackendFormatted
 from .api import CachedValue
-from .api import CacheMutex
-from .api import CacheReturnType
-from .api import KeyType
-from .api import MetaDataType
 from .api import NO_VALUE
-from .api import SerializedReturnType
-from .api import Serializer
-from .api import ValuePayload
 from .backends import _backend_loader
 from .backends import register_backend  # noqa
 from .proxy import ProxyBackend
@@ -42,11 +23,12 @@ from .util import repr_obj
 from .. import Lock
 from .. import NeedRegenerationException
 from ..util import coerce_string_conf
+from ..util import compat
 from ..util import memoized_property
 from ..util import NameRegistry
 from ..util import PluginLoader

-value_version = 2
+value_version = 1
 """An integer placed in the :class:`.CachedValue`
 so that new versions of dogpile.cache can detect cached
 values from a previous, backwards-incompatible version.
@@ -56,20 +38,7 @@ values from a previous, backwards-incompatible version.
 log = logging.getLogger(__name__)


-AsyncCreator = Callable[
-    ["CacheRegion", KeyType, Callable[[], ValuePayload], CacheMutex], None
-]
-
-ExpirationTimeCallable = Callable[[], float]
-
-ToStr = Callable[[Any], str]
-
-FunctionKeyGenerator = Callable[..., Callable[..., KeyType]]
-
-FunctionMultiKeyGenerator = Callable[..., Callable[..., Sequence[KeyType]]]
-
-
-class RegionInvalidationStrategy:
+class RegionInvalidationStrategy(object):
     """Region invalidation strategy interface

     Implement this interface and pass implementation instance
@@ -141,7 +110,7 @@ class RegionInvalidationStrategy:

     """

-    def invalidate(self, hard: bool = True) -> None:
+    def invalidate(self, hard=True):
         """Region invalidation.

         :class:`.CacheRegion` propagated call.
@@ -153,7 +122,7 @@ class RegionInvalidationStrategy:

         raise NotImplementedError()

-    def is_hard_invalidated(self, timestamp: float) -> bool:
+    def is_hard_invalidated(self, timestamp):
         """Check timestamp to determine if it was hard invalidated.

         :return: Boolean. True if ``timestamp`` is older than
@@ -164,7 +133,7 @@ class RegionInvalidationStrategy:

         raise NotImplementedError()

-    def is_soft_invalidated(self, timestamp: float) -> bool:
+    def is_soft_invalidated(self, timestamp):
         """Check timestamp to determine if it was soft invalidated.

         :return: Boolean. True if ``timestamp`` is older than
@@ -175,7 +144,7 @@ class RegionInvalidationStrategy:

         raise NotImplementedError()

-    def is_invalidated(self, timestamp: float) -> bool:
+    def is_invalidated(self, timestamp):
         """Check timestamp to determine if it was invalidated.

         :return: Boolean. True if ``timestamp`` is older than
@@ -185,7 +154,7 @@ class RegionInvalidationStrategy:

         raise NotImplementedError()

-    def was_soft_invalidated(self) -> bool:
+    def was_soft_invalidated(self):
         """Indicate the region was invalidated in soft mode.

         :return: Boolean. True if region was invalidated in soft mode.
@@ -194,7 +163,7 @@ class RegionInvalidationStrategy:

         raise NotImplementedError()

-    def was_hard_invalidated(self) -> bool:
+    def was_hard_invalidated(self):
         """Indicate the region was invalidated in hard mode.

         :return: Boolean. True if region was invalidated in hard mode.
@@ -209,27 +178,27 @@ class DefaultInvalidationStrategy(RegionInvalidationStrategy):
         self._is_hard_invalidated = None
         self._invalidated = None

-    def invalidate(self, hard: bool = True) -> None:
+    def invalidate(self, hard=True):
         self._is_hard_invalidated = bool(hard)
         self._invalidated = time.time()

-    def is_invalidated(self, timestamp: float) -> bool:
+    def is_invalidated(self, timestamp):
         return self._invalidated is not None and timestamp < self._invalidated

-    def was_hard_invalidated(self) -> bool:
+    def was_hard_invalidated(self):
         return self._is_hard_invalidated is True

-    def is_hard_invalidated(self, timestamp: float) -> bool:
+    def is_hard_invalidated(self, timestamp):
         return self.was_hard_invalidated() and self.is_invalidated(timestamp)

-    def was_soft_invalidated(self) -> bool:
+    def was_soft_invalidated(self):
         return self._is_hard_invalidated is False

-    def is_soft_invalidated(self, timestamp: float) -> bool:
+    def is_soft_invalidated(self, timestamp):
         return self.was_soft_invalidated() and self.is_invalidated(timestamp)


-class CacheRegion:
+class CacheRegion(object):
     r"""A front end to a particular cache backend.

     :param name: Optional, a string name for the region.
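``RegionInvalidationStrategy`` is meant to be subclassed when invalidation state must be visible beyond one object; a minimal sketch mirroring ``DefaultInvalidationStrategy`` above (illustrative — a real implementation would keep ``_invalidated`` somewhere shared, such as Redis, rather than in memory):

    import time
    from dogpile.cache.region import RegionInvalidationStrategy

    class SharedInvalidationStrategy(RegionInvalidationStrategy):
        def __init__(self):
            self._invalidated = None
            self._hard = None

        def invalidate(self, hard=True):
            self._hard = bool(hard)
            self._invalidated = time.time()

        def is_invalidated(self, timestamp):
            return (
                self._invalidated is not None
                and timestamp < self._invalidated
            )

        def was_hard_invalidated(self):
            return self._hard is True

        def is_hard_invalidated(self, timestamp):
            return self.was_hard_invalidated() and self.is_invalidated(timestamp)

        def was_soft_invalidated(self):
            return self._hard is False

        def is_soft_invalidated(self, timestamp):
            return self.was_soft_invalidated() and self.is_invalidated(timestamp)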
@@ -315,21 +284,6 @@ class CacheRegion:
     to convert non-string or Unicode keys to bytestrings, which is
     needed when using a backend such as bsddb or dbm under Python 2.x
     in conjunction with Unicode keys.

-    :param serializer: function which will be applied to all values before
-     passing to the backend.  Defaults to ``None``, in which case the
-     serializer recommended by the backend will be used.  Typical
-     serializers include ``pickle.dumps`` and ``json.dumps``.
-
-     .. versionadded:: 1.1.0
-
-    :param deserializer: function which will be applied to all values returned
-     by the backend.  Defaults to ``None``, in which case the
-     deserializer recommended by the backend will be used.  Typical
-     deserializers include ``pickle.dumps`` and ``json.dumps``.
-
-     .. versionadded:: 1.1.0
-
     :param async_creation_runner: A callable that, when specified,
      will be passed to and called by dogpile.lock when
      there is a stale value present in the cache.  It will be passed the
@@ -385,37 +339,31 @@ class CacheRegion:

     def __init__(
         self,
-        name: Optional[str] = None,
-        function_key_generator: FunctionKeyGenerator = function_key_generator,
-        function_multi_key_generator: FunctionMultiKeyGenerator = function_multi_key_generator,  # noqa E501
-        key_mangler: Optional[Callable[[KeyType], KeyType]] = None,
-        serializer: Optional[Callable[[ValuePayload], bytes]] = None,
-        deserializer: Optional[Callable[[bytes], ValuePayload]] = None,
-        async_creation_runner: Optional[AsyncCreator] = None,
+        name=None,
+        function_key_generator=function_key_generator,
+        function_multi_key_generator=function_multi_key_generator,
+        key_mangler=None,
+        async_creation_runner=None,
     ):
         """Construct a new :class:`.CacheRegion`."""
         self.name = name
         self.function_key_generator = function_key_generator
         self.function_multi_key_generator = function_multi_key_generator
         self.key_mangler = self._user_defined_key_mangler = key_mangler
-        self.serializer = self._user_defined_serializer = serializer
-        self.deserializer = self._user_defined_deserializer = deserializer
         self.async_creation_runner = async_creation_runner
-        self.region_invalidator: RegionInvalidationStrategy = (
-            DefaultInvalidationStrategy()
-        )
+        self.region_invalidator = DefaultInvalidationStrategy()

     def configure(
         self,
-        backend: str,
-        expiration_time: Optional[Union[float, datetime.timedelta]] = None,
-        arguments: Optional[BackendArguments] = None,
-        _config_argument_dict: Optional[Mapping[str, Any]] = None,
-        _config_prefix: Optional[str] = None,
-        wrap: Sequence[Union[ProxyBackend, Type[ProxyBackend]]] = (),
-        replace_existing_backend: bool = False,
-        region_invalidator: Optional[RegionInvalidationStrategy] = None,
-    ) -> "CacheRegion":
+        backend,
+        expiration_time=None,
+        arguments=None,
+        _config_argument_dict=None,
+        _config_prefix=None,
+        wrap=None,
+        replace_existing_backend=False,
+        region_invalidator=None,
+    ):
         """Configure a :class:`.CacheRegion`.

         The :class:`.CacheRegion` itself
@@ -490,12 +438,12 @@ class CacheRegion:
         else:
             self.backend = backend_cls(arguments or {})

-        self.expiration_time: Union[float, None]
-
         if not expiration_time or isinstance(expiration_time, Number):
-            self.expiration_time = cast(Union[None, float], expiration_time)
+            self.expiration_time = expiration_time
         elif isinstance(expiration_time, datetime.timedelta):
-            self.expiration_time = int(expiration_time.total_seconds())
+            self.expiration_time = int(
+                compat.timedelta_total_seconds(expiration_time)
+            )
         else:
             raise exception.ValidationError(
                 "expiration_time is not a number or timedelta."
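Whichever side of the hunk is read, ``expiration_time`` ends up stored in seconds: a ``timedelta`` goes through ``total_seconds()`` in 1.1.x and ``compat.timedelta_total_seconds()`` in 0.9.2. Caller-side usage is identical (sketch, using the built-in memory backend so it runs standalone):

    import datetime
    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memory',
        expiration_time=datetime.timedelta(hours=2),  # stored as 7200
    )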
@@ -504,12 +452,6 @@ class CacheRegion:
         if not self._user_defined_key_mangler:
             self.key_mangler = self.backend.key_mangler

-        if not self._user_defined_serializer:
-            self.serializer = self.backend.serializer
-
-        if not self._user_defined_deserializer:
-            self.deserializer = self.backend.deserializer
-
         self._lock_registry = NameRegistry(self._create_mutex)

         if getattr(wrap, "__iter__", False):
@@ -521,28 +463,26 @@ class CacheRegion:

         return self

-    def wrap(self, proxy: Union[ProxyBackend, Type[ProxyBackend]]) -> None:
-        """Takes a ProxyBackend instance or class and wraps the
-        attached backend."""
+    def wrap(self, proxy):
+        """ Takes a ProxyBackend instance or class and wraps the
+        attached backend. """

         # if we were passed a type rather than an instance then
         # initialize it.
-        if isinstance(proxy, type):
-            proxy_instance = proxy()
-        else:
-            proxy_instance = proxy
-
-        if not isinstance(proxy_instance, ProxyBackend):
+        if type(proxy) == type:
+            proxy = proxy()
+
+        if not issubclass(type(proxy), ProxyBackend):
             raise TypeError(
-                "%r is not a valid ProxyBackend" % (proxy_instance,)
+                "Type %s is not a valid ProxyBackend" % type(proxy)
             )

-        self.backend = proxy_instance.wrap(self.backend)
+        self.backend = proxy.wrap(self.backend)

     def _mutex(self, key):
         return self._lock_registry.get(key)

-    class _LockWrapper(CacheMutex):
+    class _LockWrapper(object):
         """weakref-capable wrapper for threading.Lock"""

         def __init__(self):
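Despite the rewritten type check, ``wrap()`` accepts either a ``ProxyBackend`` subclass or an instance on both sides of the diff; for example, reusing the hypothetical ``LoggingProxy`` from the proxy.py section:

    region.wrap(LoggingProxy)    # a class: instantiated on the spot
    region.wrap(LoggingProxy())  # or an already-built instance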
@@ -554,9 +494,6 @@ class CacheRegion:
         def release(self):
             self.lock.release()

-        def locked(self):
-            return self.lock.locked()
-
     def _create_mutex(self, key):
         mutex = self.backend.get_mutex(key)
         if mutex is not None:
@@ -762,7 +699,7 @@ class CacheRegion:

         if self.key_mangler:
             key = self.key_mangler(key)
-        value = self._get_from_backend(key)
+        value = self.backend.get(key)
         value = self._unexpired_value_fn(expiration_time, ignore_expiration)(
             value
         )
@@ -830,10 +767,10 @@ class CacheRegion:
         if not keys:
             return []

-        if self.key_mangler is not None:
-            keys = [self.key_mangler(key) for key in keys]
+        if self.key_mangler:
+            keys = list(map(lambda key: self.key_mangler(key), keys))

-        backend_values = self._get_multi_from_backend(keys)
+        backend_values = self.backend.get_multi(keys)

         _unexpired_value_fn = self._unexpired_value_fn(
             expiration_time, ignore_expiration
@@ -868,26 +805,14 @@ class CacheRegion:

         return True

-    def key_is_locked(self, key: KeyType) -> bool:
-        """Return True if a particular cache key is currently being generated
-        within the dogpile lock.
-
-        .. versionadded:: 1.1.2
-
-        """
-        mutex = self._mutex(key)
-        locked: bool = mutex.locked()
-        return locked
-
     def get_or_create(
         self,
-        key: KeyType,
-        creator: Callable[..., ValuePayload],
-        expiration_time: Optional[float] = None,
-        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
-        creator_args: Optional[Tuple[Any, Mapping[str, Any]]] = None,
-    ) -> ValuePayload:
+        key,
+        creator,
+        expiration_time=None,
+        should_cache_fn=None,
+        creator_args=None,
+    ):

         """Return a cached value based on the given key.

         If the value does not exist or is considered to be expired
@@ -974,17 +899,12 @@ class CacheRegion:
             key = self.key_mangler(key)

         def get_value():
-            value = self._get_from_backend(key)
+            value = self.backend.get(key)
             if self._is_cache_miss(value, orig_key):
                 raise NeedRegenerationException()

-            ct = cast(CachedValue, value).metadata["ct"]
+            ct = value.metadata["ct"]
             if self.region_invalidator.is_soft_invalidated(ct):
-                if expiration_time is None:
-                    raise exception.DogpileCacheException(
-                        "Non-None expiration time required "
-                        "for soft invalidation"
-                    )
                 ct = time.time() - expiration_time - 0.0001

             return value.payload, ct
@@ -999,42 +919,37 @@ class CacheRegion:
             created_value = creator()
             value = self._value(created_value)

-            if (
-                expiration_time is None
-                and self.region_invalidator.was_soft_invalidated()
-            ):
-                raise exception.DogpileCacheException(
-                    "Non-None expiration time required "
-                    "for soft invalidation"
-                )
-
             if not should_cache_fn or should_cache_fn(created_value):
-                self._set_cached_value_to_backend(key, value)
+                self.backend.set(key, value)

             return value.payload, value.metadata["ct"]

         if expiration_time is None:
             expiration_time = self.expiration_time

+        if (
+            expiration_time is None
+            and self.region_invalidator.was_soft_invalidated()
+        ):
+            raise exception.DogpileCacheException(
+                "Non-None expiration time required " "for soft invalidation"
+            )
+
         if expiration_time == -1:
             expiration_time = None

-        async_creator: Optional[Callable[[CacheMutex], AsyncCreator]]
         if self.async_creation_runner:
-            acr = self.async_creation_runner

             def async_creator(mutex):
                 if creator_args:
-                    ca = creator_args

                     @wraps(creator)
                     def go():
-                        return creator(*ca[0], **ca[1])
+                        return creator(*creator_args[0], **creator_args[1])

                 else:
                     go = creator
-                return acr(self, orig_key, go, mutex)
+                return self.async_creation_runner(self, orig_key, go, mutex)

         else:
             async_creator = None
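None of the hunks above change the calling convention of ``get_or_create()``; a minimal, runnable sketch with the memory backend and a hypothetical creator:

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory',
                                     expiration_time=60)

    def load_user():
        return {'name': 'someuser'}  # stand-in for an expensive lookup

    user = region.get_or_create('user:1', load_user)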
@@ -1049,12 +964,8 @@ class CacheRegion:
             return value

     def get_or_create_multi(
-        self,
-        keys: Sequence[KeyType],
-        creator: Callable[[], ValuePayload],
-        expiration_time: Optional[float] = None,
-        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
-    ) -> Sequence[ValuePayload]:
+        self, keys, creator, expiration_time=None, should_cache_fn=None
+    ):
         """Return a sequence of cached values based on a sequence of keys.

         The behavior for generation of values based on keys corresponds
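As with the single-key variant, only the signature formatting changes here; ``get_or_create_multi()`` is still called with a key sequence plus a creator that receives only the missing keys (sketch, reusing ``region`` from the previous example):

    def load_users(*keys):
        # invoked only for keys that were absent or expired
        return [{'id': k} for k in keys]

    users = region.get_or_create_multi(['user:1', 'user:2'], load_users)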
@@ -1109,29 +1020,34 @@ class CacheRegion:
                 # _has_value() will return False.
                 return value.payload, 0
             else:
-                ct = cast(CachedValue, value).metadata["ct"]
+                ct = value.metadata["ct"]
                 if self.region_invalidator.is_soft_invalidated(ct):
-                    if expiration_time is None:
-                        raise exception.DogpileCacheException(
-                            "Non-None expiration time required "
-                            "for soft invalidation"
-                        )
                     ct = time.time() - expiration_time - 0.0001

                 return value.payload, ct

-        def gen_value() -> ValuePayload:
+        def gen_value():
             raise NotImplementedError()

-        def async_creator(mutexes, key, mutex):
+        def async_creator(key, mutex):
             mutexes[key] = mutex

         if expiration_time is None:
             expiration_time = self.expiration_time

+        if (
+            expiration_time is None
+            and self.region_invalidator.was_soft_invalidated()
+        ):
+            raise exception.DogpileCacheException(
+                "Non-None expiration time required " "for soft invalidation"
+            )
+
         if expiration_time == -1:
             expiration_time = None

+        mutexes = {}
+
         sorted_unique_keys = sorted(set(keys))

         if self.key_mangler:
@@ -1141,11 +1057,7 @@ class CacheRegion:

         orig_to_mangled = dict(zip(sorted_unique_keys, mangled_keys))

-        values = dict(
-            zip(mangled_keys, self._get_multi_from_backend(mangled_keys))
-        )
-
-        mutexes: Mapping[KeyType, Any] = {}
+        values = dict(zip(mangled_keys, self.backend.get_multi(mangled_keys)))

         for orig_key, mangled_key in orig_to_mangled.items():
             with Lock(
@@ -1153,9 +1065,7 @@ class CacheRegion:
                 gen_value,
                 lambda: get_value(mangled_key),
                 expiration_time,
-                async_creator=lambda mutex: async_creator(
-                    mutexes, orig_key, mutex
-                ),
+                async_creator=lambda mutex: async_creator(orig_key, mutex),
             ):
                 pass
         try:
@@ -1167,194 +1077,59 @@ class CacheRegion:
             with self._log_time(keys_to_get):
                 new_values = creator(*keys_to_get)

-            values_w_created = {
-                orig_to_mangled[k]: self._value(v)
+            values_w_created = dict(
+                (orig_to_mangled[k], self._value(v))
                 for k, v in zip(keys_to_get, new_values)
-            }
-
-            if (
-                expiration_time is None
-                and self.region_invalidator.was_soft_invalidated()
-            ):
-                raise exception.DogpileCacheException(
-                    "Non-None expiration time required "
-                    "for soft invalidation"
-                )
+            )

             if not should_cache_fn:
-                self._set_multi_cached_value_to_backend(values_w_created)
-
+                self.backend.set_multi(values_w_created)
             else:
-                self._set_multi_cached_value_to_backend(
-                    {
-                        k: v
-                        for k, v in values_w_created.items()
-                        if should_cache_fn(v.payload)
-                    }
-                )
+                values_to_cache = dict(
+                    (k, v)
+                    for k, v in values_w_created.items()
+                    if should_cache_fn(v[0])
+                )
+
+                if values_to_cache:
+                    self.backend.set_multi(values_to_cache)

             values.update(values_w_created)
             return [values[orig_to_mangled[k]].payload for k in keys]
         finally:
             for mutex in mutexes.values():
                 mutex.release()

-    def _value(
-        self, value: Any, metadata: Optional[MetaDataType] = None
-    ) -> CachedValue:
+    def _value(self, value):
         """Return a :class:`.CachedValue` given a value."""
-        if metadata is None:
-            metadata = self._gen_metadata()
-        return CachedValue(value, metadata)
-
-    def _parse_serialized_from_backend(
-        self, value: SerializedReturnType
-    ) -> CacheReturnType:
-        if value in (None, NO_VALUE):
-            return NO_VALUE
-
-        assert self.deserializer
-        byte_value = cast(bytes, value)
-
-        bytes_metadata, _, bytes_payload = byte_value.partition(b"|")
-        metadata = json.loads(bytes_metadata)
-        payload = self.deserializer(bytes_payload)
-        return CachedValue(payload, metadata)
-
-    def _serialize_cached_value_elements(
-        self, payload: ValuePayload, metadata: MetaDataType
-    ) -> bytes:
-        serializer = cast(Serializer, self.serializer)
-
-        return b"%b|%b" % (
-            json.dumps(metadata).encode("ascii"),
-            serializer(payload),
-        )
-
-    def _serialized_payload(
-        self, payload: ValuePayload, metadata: Optional[MetaDataType] = None
-    ) -> BackendFormatted:
-        """Return a backend formatted representation of a value.
-
-        If a serializer is in use then this will return a string representation
-        with the value formatted by the serializer.
-
-        """
-        if metadata is None:
-            metadata = self._gen_metadata()
-
-        return self._serialize_cached_value_elements(payload, metadata)
-
-    def _serialized_cached_value(self, value: CachedValue) -> BackendFormatted:
-        """Return a backend formatted representation of a :class:`.CachedValue`.
-
-        If a serializer is in use then this will return a string representation
-        with the value formatted by the serializer.
-
-        """
-        assert self.serializer
-        return self._serialize_cached_value_elements(
-            value.payload, value.metadata
-        )
-
-    def _get_from_backend(self, key: KeyType) -> CacheReturnType:
-        if self.deserializer:
-            return self._parse_serialized_from_backend(
-                self.backend.get_serialized(key)
-            )
-        else:
-            return cast(CacheReturnType, self.backend.get(key))
-
-    def _get_multi_from_backend(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[CacheReturnType]:
-        if self.deserializer:
-            return [
-                self._parse_serialized_from_backend(v)
-                for v in self.backend.get_serialized_multi(keys)
-            ]
-        else:
-            return cast(
-                Sequence[CacheReturnType], self.backend.get_multi(keys)
-            )
-
-    def _set_cached_value_to_backend(
-        self, key: KeyType, value: CachedValue
-    ) -> None:
-        if self.serializer:
-            self.backend.set_serialized(
-                key, self._serialized_cached_value(value)
-            )
-        else:
-            self.backend.set(key, value)
-
-    def _set_multi_cached_value_to_backend(
-        self, mapping: Mapping[KeyType, CachedValue]
-    ) -> None:
-        if not mapping:
-            return
-
-        if self.serializer:
-            self.backend.set_serialized_multi(
-                {
-                    k: self._serialized_cached_value(v)
-                    for k, v in mapping.items()
-                }
-            )
-        else:
-            self.backend.set_multi(mapping)
-
-    def _gen_metadata(self) -> MetaDataType:
-        return {"ct": time.time(), "v": value_version}
-
-    def set(self, key: KeyType, value: ValuePayload) -> None:
+        return CachedValue(value, {"ct": time.time(), "v": value_version})
+
+    def set(self, key, value):
         """Place a new value in the cache under the given key."""

         if self.key_mangler:
             key = self.key_mangler(key)
-
-        if self.serializer:
-            self.backend.set_serialized(key, self._serialized_payload(value))
-        else:
-            self.backend.set(key, self._value(value))
+        self.backend.set(key, self._value(value))

-    def set_multi(self, mapping: Mapping[KeyType, ValuePayload]) -> None:
-        """Place new values in the cache under the given keys."""
+    def set_multi(self, mapping):
+        """Place new values in the cache under the given keys.
+
+        .. versionadded:: 0.5.0
+
+        """
         if not mapping:
             return

-        metadata = self._gen_metadata()
-
-        if self.serializer:
-            if self.key_mangler:
-                mapping = {
-                    self.key_mangler(k): self._serialized_payload(
-                        v, metadata=metadata
-                    )
-                    for k, v in mapping.items()
-                }
-            else:
-                mapping = {
-                    k: self._serialized_payload(v, metadata=metadata)
-                    for k, v in mapping.items()
-                }
-            self.backend.set_serialized_multi(mapping)
-        else:
-            if self.key_mangler:
-                mapping = {
-                    self.key_mangler(k): self._value(v, metadata=metadata)
-                    for k, v in mapping.items()
-                }
-            else:
-                mapping = {
-                    k: self._value(v, metadata=metadata)
-                    for k, v in mapping.items()
-                }
-            self.backend.set_multi(mapping)
+        if self.key_mangler:
+            mapping = dict(
+                (self.key_mangler(k), self._value(v))
+                for k, v in mapping.items()
+            )
+        else:
+            mapping = dict((k, self._value(v)) for k, v in mapping.items())
+        self.backend.set_multi(mapping)

-    def delete(self, key: KeyType) -> None:
+    def delete(self, key):
         """Remove a value from the cache.

         This operation is idempotent (can be called multiple times, or on a
@@ -1366,7 +1141,7 @@ class CacheRegion:

         self.backend.delete(key)

-    def delete_multi(self, keys: Sequence[KeyType]) -> None:
+    def delete_multi(self, keys):
         """Remove multiple values from the cache.

         This operation is idempotent (can be called multiple times, or on a
@@ -1377,19 +1152,18 @@ class CacheRegion:
         """

         if self.key_mangler:
-            km = self.key_mangler
-            keys = [km(key) for key in keys]
+            keys = list(map(lambda key: self.key_mangler(key), keys))

         self.backend.delete_multi(keys)

     def cache_on_arguments(
         self,
-        namespace: Optional[str] = None,
-        expiration_time: Union[float, ExpirationTimeCallable, None] = None,
-        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
-        to_str: Callable[[Any], str] = str,
-        function_key_generator: Optional[FunctionKeyGenerator] = None,
-    ) -> Callable[[Callable[..., ValuePayload]], Callable[..., ValuePayload]]:
+        namespace=None,
+        expiration_time=None,
+        should_cache_fn=None,
+        to_str=compat.string_type,
+        function_key_generator=None,
+    ):
         """A function decorator that will cache the return
         value of the function using a key derived from the
         function itself and its arguments.
@@ -1482,7 +1256,7 @@ class CacheRegion:
         (with caveats) for use with instance or class methods.
         Given the example::

-            class MyClass:
+            class MyClass(object):
                 @region.cache_on_arguments(namespace="foo")
                 def one(self, a, b):
                     return a + b
@@ -1496,12 +1270,12 @@ class CacheRegion:
         name within the same module, as can occur when decorating
         instance or class methods as below::

-            class MyClass:
+            class MyClass(object):
                 @region.cache_on_arguments(namespace='MC')
                 def somemethod(self, x, y):
                     ""

-            class MyOtherClass:
+            class MyOtherClass(object):
                 @region.cache_on_arguments(namespace='MOC')
                 def somemethod(self, x, y):
                     ""
@@ -1539,8 +1313,14 @@ class CacheRegion:
         end of the day, week or time period" and "cache until a certain date
         or time passes".

+        .. versionchanged:: 0.5.0
+           ``expiration_time`` may be passed as a callable to
+           :meth:`.CacheRegion.cache_on_arguments`.
+
         :param should_cache_fn: passed to :meth:`.CacheRegion.get_or_create`.

+        .. versionadded:: 0.4.3
+
         :param to_str: callable, will be called on each function argument
          in order to convert to a string.  Defaults to ``str()``.  If the
          function accepts non-ascii unicode arguments on Python 2.x, the
@@ -1548,10 +1328,14 @@ class CacheRegion:
         produce unicode cache keys which may require key mangling before
         reaching the cache.

+        .. versionadded:: 0.5.0
+
        :param function_key_generator: a function that will produce a
         "cache key". This function will supersede the one configured on the
         :class:`.CacheRegion` itself.

+        .. versionadded:: 0.5.5
+
         .. seealso::

             :meth:`.CacheRegion.cache_multi_on_arguments`
@@ -1559,34 +1343,30 @@ class CacheRegion:

            :meth:`.CacheRegion.get_or_create`

         """
-        expiration_time_is_callable = callable(expiration_time)
+        expiration_time_is_callable = compat.callable(expiration_time)

         if function_key_generator is None:
-            _function_key_generator = self.function_key_generator
-        else:
-            _function_key_generator = function_key_generator
+            function_key_generator = self.function_key_generator

         def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
             key = key_generator(*arg, **kw)

-            timeout: Optional[float] = (
-                cast(ExpirationTimeCallable, expiration_time)()
+            timeout = (
+                expiration_time()
                 if expiration_time_is_callable
-                else cast(Optional[float], expiration_time)
+                else expiration_time
             )
             return self.get_or_create(
                 key, user_func, timeout, should_cache_fn, (arg, kw)
             )

         def cache_decorator(user_func):
-            if to_str is cast(Callable[[Any], str], str):
+            if to_str is compat.string_type:
                 # backwards compatible
-                key_generator = _function_key_generator(
-                    namespace, user_func
-                )  # type: ignore
+                key_generator = function_key_generator(namespace, user_func)
             else:
-                key_generator = _function_key_generator(
-                    namespace, user_func, to_str
+                key_generator = function_key_generator(
+                    namespace, user_func, to_str=to_str
                 )

             def refresh(*arg, **kw):
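The decorator behaves identically on both sides of this diff; a short usage sketch (memory-backend ``region`` as before, ``heavy_lookup`` hypothetical):

    @region.cache_on_arguments(expiration_time=120)
    def heavy_lookup(x, y):
        return x + y

    heavy_lookup(1, 2)             # computed once, then served from cache
    heavy_lookup.invalidate(1, 2)  # drops the cached entry for these args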
@@ -1626,20 +1406,13 @@ class CacheRegion:

     def cache_multi_on_arguments(
         self,
-        namespace: Optional[str] = None,
-        expiration_time: Union[float, ExpirationTimeCallable, None] = None,
-        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
-        asdict: bool = False,
-        to_str: ToStr = str,
-        function_multi_key_generator: Optional[
-            FunctionMultiKeyGenerator
-        ] = None,
-    ) -> Callable[
-        [Callable[..., Sequence[ValuePayload]]],
-        Callable[
-            ..., Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]
-        ],
-    ]:
+        namespace=None,
+        expiration_time=None,
+        should_cache_fn=None,
+        asdict=False,
+        to_str=compat.string_type,
+        function_multi_key_generator=None,
+    ):
         """A function decorator that will cache multiple return
         values from the function using a sequence of keys derived from the
         function itself and the arguments passed to it.
@@ -1756,19 +1529,12 @@ class CacheRegion:
             :meth:`.CacheRegion.get_or_create_multi`

         """
-        expiration_time_is_callable = callable(expiration_time)
+        expiration_time_is_callable = compat.callable(expiration_time)

         if function_multi_key_generator is None:
-            _function_multi_key_generator = self.function_multi_key_generator
-        else:
-            _function_multi_key_generator = function_multi_key_generator
+            function_multi_key_generator = self.function_multi_key_generator

-        def get_or_create_for_user_func(
-            key_generator: Callable[..., Sequence[KeyType]],
-            user_func: Callable[..., Sequence[ValuePayload]],
-            *arg: Any,
-            **kw: Any,
-        ) -> Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]:
+        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
             cache_keys = arg
             keys = key_generator(*arg, **kw)
             key_lookup = dict(zip(keys, cache_keys))
@@ -1777,16 +1543,12 @@ class CacheRegion:
             def creator(*keys_to_create):
                 return user_func(*[key_lookup[k] for k in keys_to_create])

-            timeout: Optional[float] = (
-                cast(ExpirationTimeCallable, expiration_time)()
+            timeout = (
+                expiration_time()
                 if expiration_time_is_callable
-                else cast(Optional[float], expiration_time)
+                else expiration_time
             )

-            result: Union[
-                Sequence[ValuePayload], Mapping[KeyType, ValuePayload]
-            ]
-
             if asdict:

                 def dict_create(*keys):
@@ -1819,7 +1581,7 @@ class CacheRegion:
             return result

         def cache_decorator(user_func):
-            key_generator = _function_multi_key_generator(
+            key_generator = function_multi_key_generator(
                 namespace, user_func, to_str=to_str
             )
@@ -1865,7 +1627,7 @@ class CacheRegion:
         return cache_decorator


-def make_region(*arg: Any, **kw: Any) -> CacheRegion:
+def make_region(*arg, **kw):
|
||||||
"""Instantiate a new :class:`.CacheRegion`.
|
"""Instantiate a new :class:`.CacheRegion`.
|
||||||
|
|
||||||
Currently, :func:`.make_region` is a passthrough
|
Currently, :func:`.make_region` is a passthrough
|
||||||
|
|
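Context for the region.py hunks above: besides swapping the typing-annotated 1.x code back to the `compat` shims, the decorator contract is the same on both sides of the diff; `expiration_time` may be a number of seconds or a zero-argument callable evaluated on each call. A minimal sketch of that contract using the public dogpile.cache API (the memory backend and the 60-second figure are illustrative assumptions, not part of this commit):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    @region.cache_on_arguments(expiration_time=lambda: 60)
    def load_user(user_id):
        # expensive lookup; the cache key is derived from the module,
        # the function name, and str(user_id)
        return {"id": user_id}

    load_user(1)  # computed and stored
    load_user(1)  # served from the backend until the timeout elapses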
10 libs/common/dogpile/cache/util.py vendored

@@ -4,7 +4,7 @@ from ..util import compat
 from ..util import langhelpers
 
 
-def function_key_generator(namespace, fn, to_str=str):
+def function_key_generator(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.
@@ -45,7 +45,7 @@ def function_key_generator(namespace, fn, to_str=str):
    return generate_key
 
 
-def function_multi_key_generator(namespace, fn, to_str=str):
+def function_multi_key_generator(namespace, fn, to_str=compat.string_type):
 
    if namespace is None:
        namespace = "%s:%s" % (fn.__module__, fn.__name__)
@@ -68,7 +68,7 @@ def function_multi_key_generator(namespace, fn, to_str=str):
    return generate_keys
 
 
-def kwarg_function_key_generator(namespace, fn, to_str=str):
+def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.
@@ -131,7 +131,7 @@ def kwarg_function_key_generator(namespace, fn, to_str=str):
 def sha1_mangle_key(key):
    """a SHA1 key mangler."""
 
-    if isinstance(key, str):
+    if isinstance(key, compat.text_type):
        key = key.encode("utf-8")
 
    return sha1(key).hexdigest()
@@ -162,7 +162,7 @@ PluginLoader = langhelpers.PluginLoader
 to_list = langhelpers.to_list
 
 
-class repr_obj:
+class repr_obj(object):
 
    __slots__ = ("value", "max_chars")
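The key generators changed above are plain functions and can be exercised directly; a short sketch (the `lookup` function and its arguments are made up for illustration):

    from dogpile.cache.util import function_key_generator, sha1_mangle_key

    def lookup(host, port):
        return (host, port)

    make_key = function_key_generator(None, lookup)
    key = make_key("localhost", 6379)
    # the key looks like '<module>:lookup|localhost 6379'; positional
    # arguments are stringified with to_str() and joined with spaces
    print(key)
    print(sha1_mangle_key(key))  # fixed-length digest for length-limited backends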
@@ -15,7 +15,7 @@ class NeedRegenerationException(Exception):
 NOT_REGENERATED = object()
 
 
-class Lock:
+class Lock(object):
    """Dogpile lock class.
 
    Provides an interface around an arbitrary mutex
@@ -1,6 +1,18 @@
 import collections
 import inspect
+import sys
 
+py2k = sys.version_info < (3, 0)
+py3k = sys.version_info >= (3, 0)
+py32 = sys.version_info >= (3, 2)
+py27 = sys.version_info >= (2, 7)
+jython = sys.platform.startswith("java")
+win32 = sys.platform.startswith("win")
+
+try:
+    import threading
+except ImportError:
+    import dummy_threading as threading  # noqa
 
 FullArgSpec = collections.namedtuple(
    "FullArgSpec",
@@ -21,17 +33,7 @@ ArgSpec = collections.namedtuple(
 
 
 def inspect_getfullargspec(func):
-    """Fully vendored version of getfullargspec from Python 3.3.
-
-    This version is more performant than the one which appeared in
-    later Python 3 versions.
-
-    """
-
-    # if a Signature is already present, as is the case with newer
-    # "decorator" package, defer back to built in
-    if hasattr(func, "__signature__"):
-        return inspect.getfullargspec(func)
+    """Fully vendored version of getfullargspec from Python 3.3."""
 
    if inspect.ismethod(func):
        func = func.__func__
@@ -44,7 +46,7 @@ def inspect_getfullargspec(func):
 
    nargs = co.co_argcount
    names = co.co_varnames
-    nkwargs = co.co_kwonlyargcount
+    nkwargs = co.co_kwonlyargcount if py3k else 0
    args = list(names[:nargs])
    kwonlyargs = list(names[nargs : nargs + nkwargs])
 
@@ -63,10 +65,77 @@ def inspect_getfullargspec(func):
        varkw,
        func.__defaults__,
        kwonlyargs,
-        func.__kwdefaults__,
-        func.__annotations__,
+        func.__kwdefaults__ if py3k else None,
+        func.__annotations__ if py3k else {},
    )
 
 
 def inspect_getargspec(func):
    return ArgSpec(*inspect_getfullargspec(func)[0:4])
 
 
+if py3k:  # pragma: no cover
+    string_types = (str,)
+    text_type = str
+    string_type = str
+
+    if py32:
+        callable = callable  # noqa
+    else:
+
+        def callable(fn):  # noqa
+            return hasattr(fn, "__call__")
+
+    def u(s):
+        return s
+
+    def ue(s):
+        return s
+
+    import configparser
+    import io
+    import _thread as thread
+else:
+    # Using noqa bellow due to tox -e pep8 who use
+    # python3.7 as the default interpreter
+    string_types = (basestring,)  # noqa
+    text_type = unicode  # noqa
+    string_type = str
+
+    def u(s):
+        return unicode(s, "utf-8")  # noqa
+
+    def ue(s):
+        return unicode(s, "unicode_escape")  # noqa
+
+    import ConfigParser as configparser  # noqa
+    import StringIO as io  # noqa
+
+    callable = callable  # noqa
+    import thread  # noqa
+
+
+if py3k or jython:
+    import pickle
+else:
+    import cPickle as pickle  # noqa
+
+if py3k:
+
+    def read_config_file(config, fileobj):
+        return config.read_file(fileobj)
+
+
+else:
+
+    def read_config_file(config, fileobj):
+        return config.readfp(fileobj)
+
+
+def timedelta_total_seconds(td):
+    if py27:
+        return td.total_seconds()
+    else:
+        return (
+            td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6
+        ) / 1e6
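The restored compat module exists so that callers never branch on the interpreter themselves; they import the shim and stay version-agnostic. A runnable condensation of one such shim (Python 3 names shown; under Python 2 the same call would route through readfp() and the ConfigParser/StringIO modules):

    import configparser
    import io
    import sys

    py3k = sys.version_info >= (3, 0)

    def read_config_file(config, fileobj):
        # ConfigParser.read_file() replaced readfp() in Python 3
        return config.read_file(fileobj) if py3k else config.readfp(fileobj)

    cfg = configparser.ConfigParser()
    read_config_file(cfg, io.StringIO("[cache]\nbackend = memory\n"))
    print(cfg.get("cache", "backend"))  # -> memory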
@@ -1,17 +1,13 @@
-import abc
 import collections
 import re
-import threading
-from typing import MutableMapping
-from typing import MutableSet
 
-import stevedore
+from . import compat
 
 
 def coerce_string_conf(d):
    result = {}
    for k, v in d.items():
-        if not isinstance(v, str):
+        if not isinstance(v, compat.string_types):
            result[k] = v
            continue
 
@@ -29,26 +25,21 @@ def coerce_string_conf(d):
    return result
 
 
-class PluginLoader:
+class PluginLoader(object):
    def __init__(self, group):
        self.group = group
-        self.impls = {}  # loaded plugins
-        self._mgr = None  # lazily defined stevedore manager
-        self._unloaded = {}  # plugins registered but not loaded
+        self.impls = {}
 
    def load(self, name):
-        if name in self._unloaded:
-            self.impls[name] = self._unloaded[name]()
-            return self.impls[name]
        if name in self.impls:
-            return self.impls[name]
+            return self.impls[name]()
        else:  # pragma NO COVERAGE
-            if self._mgr is None:
-                self._mgr = stevedore.ExtensionManager(self.group)
-            try:
-                self.impls[name] = self._mgr[name].plugin
-                return self.impls[name]
-            except KeyError:
+            import pkg_resources
+            for impl in pkg_resources.iter_entry_points(self.group, name):
+                self.impls[name] = impl.load
+                return impl.load()
+            else:
                raise self.NotFound(
                    "Can't load plugin %s %s" % (self.group, name)
                )
@@ -58,13 +49,13 @@ class PluginLoader:
            mod = __import__(modulepath, fromlist=[objname])
            return getattr(mod, objname)
 
-        self._unloaded[name] = load
+        self.impls[name] = load
 
    class NotFound(Exception):
        """The specified plugin could not be found."""
 
 
-class memoized_property:
+class memoized_property(object):
    """A read-only @property that is only evaluated once."""
 
    def __init__(self, fget, doc=None):
@@ -89,23 +80,8 @@ def to_list(x, default=None):
    return x
 
 
-class Mutex(abc.ABC):
-    @abc.abstractmethod
-    def acquire(self, wait: bool = True) -> bool:
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def release(self) -> None:
-        raise NotImplementedError()
-
-
-class KeyReentrantMutex:
-    def __init__(
-        self,
-        key: str,
-        mutex: Mutex,
-        keys: MutableMapping[int, MutableSet[str]],
-    ):
+class KeyReentrantMutex(object):
+    def __init__(self, key, mutex, keys):
        self.key = key
        self.mutex = mutex
        self.keys = keys
@@ -115,9 +91,7 @@ class KeyReentrantMutex:
        # this collection holds zero or one
        # thread idents as the key; a set of
        # keynames held as the value.
-        keystore: MutableMapping[
-            int, MutableSet[str]
-        ] = collections.defaultdict(set)
+        keystore = collections.defaultdict(set)
 
        def fac(key):
            return KeyReentrantMutex(key, mutex, keystore)
@@ -125,7 +99,7 @@ class KeyReentrantMutex:
        return fac
 
    def acquire(self, wait=True):
-        current_thread = threading.get_ident()
+        current_thread = compat.threading.current_thread().ident
        keys = self.keys.get(current_thread)
        if keys is not None and self.key not in keys:
            # current lockholder, new key. add it in
@@ -139,7 +113,7 @@ class KeyReentrantMutex:
            return False
 
    def release(self):
-        current_thread = threading.get_ident()
+        current_thread = compat.threading.current_thread().ident
        keys = self.keys.get(current_thread)
        assert keys is not None, "this thread didn't do the acquire"
        assert self.key in keys, "No acquire held for key '%s'" % self.key
@@ -149,10 +123,3 @@ class KeyReentrantMutex:
        # the thread ident and unlock.
        del self.keys[current_thread]
        self.mutex.release()
-
-    def locked(self):
-        current_thread = threading.get_ident()
-        keys = self.keys.get(current_thread)
-        if keys is None:
-            return False
-        return self.key in keys
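For context, KeyReentrantMutex.factory above wraps a single underlying mutex so that one thread may hold it under several cache keys at once; only releasing the last key unlocks the wrapped mutex. A sketch (the wrapped object only needs acquire()/release(); SimpleMutex and the key strings are invented for the example, and the import path assumes this tree follows upstream's dogpile.util.langhelpers layout):

    import threading

    from dogpile.util.langhelpers import KeyReentrantMutex

    class SimpleMutex(object):
        def __init__(self):
            self._lock = threading.Lock()

        def acquire(self, wait=True):
            return self._lock.acquire(wait)

        def release(self):
            self._lock.release()

    fac = KeyReentrantMutex.factory(SimpleMutex())
    m1 = fac("region1:key_a")
    m2 = fac("region1:key_b")
    m1.acquire()
    m2.acquire()  # same thread, new key: granted without re-locking
    m2.release()
    m1.release()  # last key out releases the underlying mutex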
@@ -1,9 +1,7 @@
-import threading
-from typing import Any
-from typing import Callable
-from typing import MutableMapping
 import weakref
 
+from .compat import threading
 
 
 class NameRegistry(object):
    """Generates and return an object, keeping it as a
@@ -41,15 +39,19 @@ class NameRegistry(object):
 
    """
 
+    _locks = weakref.WeakValueDictionary()
    _mutex = threading.RLock()
 
-    def __init__(self, creator: Callable[..., Any]):
-        """Create a new :class:`.NameRegistry`."""
-        self._values: MutableMapping[str, Any] = weakref.WeakValueDictionary()
+    def __init__(self, creator):
+        """Create a new :class:`.NameRegistry`.
+
+        """
+        self._values = weakref.WeakValueDictionary()
        self._mutex = threading.RLock()
        self.creator = creator
 
-    def get(self, identifier: str, *args: Any, **kw: Any) -> Any:
+    def get(self, identifier, *args, **kw):
        r"""Get and possibly create the value.
 
        :param identifier: Hash key for the value.
@@ -68,7 +70,7 @@ class NameRegistry(object):
        except KeyError:
            return self._sync_get(identifier, *args, **kw)
 
-    def _sync_get(self, identifier: str, *args: Any, **kw: Any) -> Any:
+    def _sync_get(self, identifier, *args, **kw):
        self._mutex.acquire()
        try:
            try:
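NameRegistry, diffed above, hands out one object per identifier and lets it be garbage-collected once no caller references it, which is why it stores values in a WeakValueDictionary. A sketch; MutexForName is a stand-in creator, and the created objects must be weak-referenceable (a plain threading.Lock would not be), with the import path again assumed from upstream's layout:

    from dogpile.util.nameregistry import NameRegistry

    class MutexForName(object):
        def __init__(self, name):
            self.name = name

    registry = NameRegistry(MutexForName)

    a1 = registry.get("user:1")
    a2 = registry.get("user:1")
    b = registry.get("user:2")
    assert a1 is a2       # same identifier, same live object
    assert a1 is not b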
@@ -1,5 +1,6 @@
 import logging
-import threading
+
+from .compat import threading
 
 log = logging.getLogger(__name__)
 
@@ -62,10 +63,10 @@ class ReadWriteMutex(object):
            # check if we are the last asynchronous reader thread
            # out the door.
            if self.async_ == 0:
-                # yes. so if a sync operation is waiting, notify_all to wake
+                # yes. so if a sync operation is waiting, notifyAll to wake
                # it up
                if self.current_sync_operation is not None:
-                    self.condition.notify_all()
+                    self.condition.notifyAll()
            elif self.async_ < 0:
                raise LockError(
                    "Synchronizer error - too many "
@@ -95,7 +96,7 @@ class ReadWriteMutex(object):
            # establish ourselves as the current sync
            # this indicates to other read/write operations
            # that they should wait until this is None again
-            self.current_sync_operation = threading.current_thread()
+            self.current_sync_operation = threading.currentThread()
 
            # now wait again for asyncs to finish
            if self.async_ > 0:
@@ -117,7 +118,7 @@ class ReadWriteMutex(object):
        """Release the 'write' lock."""
        self.condition.acquire()
        try:
-            if self.current_sync_operation is not threading.current_thread():
+            if self.current_sync_operation is not threading.currentThread():
                raise LockError(
                    "Synchronizer error - current thread doesn't "
                    "have the write lock"
@@ -128,7 +129,7 @@ class ReadWriteMutex(object):
            self.current_sync_operation = None
 
            # tell everyone to get ready
-            self.condition.notify_all()
+            self.condition.notifyAll()
 
            log.debug("%s released write lock", self)
        finally:
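ReadWriteMutex, touched above only for the currentThread/notifyAll spellings, is a many-reader/one-writer latch: readers share the "async" side while the "sync" side waits for them to drain. A usage sketch against its public methods:

    from dogpile.util.readwrite_lock import ReadWriteMutex

    rw = ReadWriteMutex()

    rw.acquire_read_lock()     # any number of readers may hold this together
    # ... read shared state ...
    rw.release_read_lock()

    rw.acquire_write_lock()    # exclusive; blocks until readers are out
    # ... mutate shared state ...
    rw.release_write_lock()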
@@ -1,203 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Use a cache layer in front of entry point scanning."""
-
-import errno
-import glob
-import hashlib
-import itertools
-import json
-import logging
-import os
-import os.path
-import struct
-import sys
-
-try:
-    # For python 3.8 and later
-    import importlib.metadata as importlib_metadata
-except ImportError:
-    # For everyone else
-    import importlib_metadata
-
-
-log = logging.getLogger('stevedore._cache')
-
-
-def _get_cache_dir():
-    """Locate a platform-appropriate cache directory to use.
-
-    Does not ensure that the cache directory exists.
-    """
-    # Linux, Unix, AIX, etc.
-    if os.name == 'posix' and sys.platform != 'darwin':
-        # use ~/.cache if empty OR not set
-        base_path = os.environ.get("XDG_CACHE_HOME", None) \
-            or os.path.expanduser('~/.cache')
-        return os.path.join(base_path, 'python-entrypoints')
-
-    # Mac OS
-    elif sys.platform == 'darwin':
-        return os.path.expanduser('~/Library/Caches/Python Entry Points')
-
-    # Windows (hopefully)
-    else:
-        base_path = os.environ.get('LOCALAPPDATA', None) \
-            or os.path.expanduser('~\\AppData\\Local')
-        return os.path.join(base_path, 'Python Entry Points')
-
-
-def _get_mtime(name):
-    try:
-        s = os.stat(name)
-        return s.st_mtime
-    except OSError as err:
-        if err.errno != errno.ENOENT:
-            raise
-    return -1.0
-
-
-def _ftobytes(f):
-    return struct.Struct('f').pack(f)
-
-
-def _hash_settings_for_path(path):
-    """Return a hash and the path settings that created it."""
-    paths = []
-    h = hashlib.sha256()
-
-    # Tie the cache to the python interpreter, in case it is part of a
-    # virtualenv.
-    h.update(sys.executable.encode('utf-8'))
-    h.update(sys.prefix.encode('utf-8'))
-
-    for entry in path:
-        mtime = _get_mtime(entry)
-        h.update(entry.encode('utf-8'))
-        h.update(_ftobytes(mtime))
-        paths.append((entry, mtime))
-
-        for ep_file in itertools.chain(
-                glob.iglob(os.path.join(entry,
-                                        '*.dist-info',
-                                        'entry_points.txt')),
-                glob.iglob(os.path.join(entry,
-                                        '*.egg-info',
-                                        'entry_points.txt'))
-        ):
-            mtime = _get_mtime(ep_file)
-            h.update(ep_file.encode('utf-8'))
-            h.update(_ftobytes(mtime))
-            paths.append((ep_file, mtime))
-
-    return (h.hexdigest(), paths)
-
-
-def _build_cacheable_data(path):
-    real_groups = importlib_metadata.entry_points()
-    # Convert the namedtuple values to regular tuples
-    groups = {}
-    for name, group_data in real_groups.items():
-        existing = set()
-        members = []
-        groups[name] = members
-        for ep in group_data:
-            # Filter out duplicates that can occur when testing a
-            # package that provides entry points using tox, where the
-            # package is installed in the virtualenv that tox builds
-            # and is present in the path as '.'.
-            item = ep.name, ep.value, ep.group  # convert to tuple
-            if item in existing:
-                continue
-            existing.add(item)
-            members.append(item)
-    return {
-        'groups': groups,
-        'sys.executable': sys.executable,
-        'sys.prefix': sys.prefix,
-    }
-
-
-class Cache:
-
-    def __init__(self, cache_dir=None):
-        if cache_dir is None:
-            cache_dir = _get_cache_dir()
-        self._dir = cache_dir
-        self._internal = {}
-        self._disable_caching = False
-
-        # Caching can be disabled by either placing .disable file into the
-        # target directory or when python executable is under /tmp (this is the
-        # case when executed from ansible)
-        if any([os.path.isfile(os.path.join(self._dir, '.disable')),
-                sys.executable[0:4] == '/tmp']):
-            self._disable_caching = True
-
-    def _get_data_for_path(self, path):
-        if path is None:
-            path = sys.path
-
-        internal_key = tuple(path)
-        if internal_key in self._internal:
-            return self._internal[internal_key]
-
-        digest, path_values = _hash_settings_for_path(path)
-        filename = os.path.join(self._dir, digest)
-        try:
-            log.debug('reading %s', filename)
-            with open(filename, 'r') as f:
-                data = json.load(f)
-        except (IOError, json.JSONDecodeError):
-            data = _build_cacheable_data(path)
-            data['path_values'] = path_values
-            if not self._disable_caching:
-                try:
-                    log.debug('writing to %s', filename)
-                    os.makedirs(self._dir, exist_ok=True)
-                    with open(filename, 'w') as f:
-                        json.dump(data, f)
-                except (IOError, OSError):
-                    # Could not create cache dir or write file.
-                    pass
-
-        self._internal[internal_key] = data
-        return data
-
-    def get_group_all(self, group, path=None):
-        result = []
-        data = self._get_data_for_path(path)
-        group_data = data.get('groups', {}).get(group, [])
-        for vals in group_data:
-            result.append(importlib_metadata.EntryPoint(*vals))
-        return result
-
-    def get_group_named(self, group, path=None):
-        result = {}
-        for ep in self.get_group_all(group, path=path):
-            if ep.name not in result:
-                result[ep.name] = ep
-        return result
-
-    def get_single(self, group, name, path=None):
-        for name, ep in self.get_group_named(group, path=path).items():
-            if name == name:
-                return ep
-        raise ValueError('No entrypoint {!r} in group {!r}'.format(
-            group, name))
-
-
-_c = Cache()
-get_group_all = _c.get_group_all
-get_group_named = _c.get_group_named
-get_single = _c.get_single
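The 203-line file deleted above is stevedore's entry-point scan cache (its logger is named 'stevedore._cache'); it keyed a JSON cache file on a sha256 of the interpreter plus every path entry and its mtime, so any install or uninstall invalidated the cache. A condensed restatement of that key scheme:

    import hashlib
    import os
    import struct
    import sys

    def cache_digest(path_entries):
        h = hashlib.sha256()
        # tie the cache to the interpreter, e.g. a virtualenv
        h.update(sys.executable.encode("utf-8"))
        h.update(sys.prefix.encode("utf-8"))
        for entry in path_entries:
            try:
                mtime = os.stat(entry).st_mtime
            except OSError:
                mtime = -1.0
            h.update(entry.encode("utf-8"))
            h.update(struct.Struct("f").pack(mtime))
        return h.hexdigest()

    print(cache_digest(sys.path))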
@@ -142,7 +142,7 @@ class NameDispatchExtensionManager(DispatchExtensionManager):
        then ignored
    :type invoke_on_load: bool
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -10,8 +10,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from .exception import MultipleMatches
-from .exception import NoMatches
+from .exception import NoMatches, MultipleMatches
 from .named import NamedExtensionManager
 
 
@@ -34,7 +33,7 @@ class DriverManager(NamedExtensionManager):
        is True.
    :type invoke_kwds: dict
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -86,7 +85,7 @@ class DriverManager(NamedExtensionManager):
           and then ignored
        :type propagate_map_exceptions: bool
        :param on_load_failure_callback: Callback function that will
-           be called when an entrypoint can not be loaded. The
+           be called when a entrypoint can not be loaded. The
           arguments that will be provided when this is called (when
           an entrypoint fails to load) are (manager, entrypoint,
           exception)
@@ -143,6 +142,7 @@ class DriverManager(NamedExtensionManager):
 
    @property
    def driver(self):
-        """Returns the driver being used by this manager."""
+        """Returns the driver being used by this manager.
+        """
        ext = self.extensions[0]
        return ext.obj if ext.obj else ext.plugin
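DriverManager, whose docstrings change above, is stevedore's load-exactly-one manager: zero matches raises NoMatches and several raise MultipleMatches (the two imports reshuffled in the first hunk). A sketch against stevedore's own example plugin, which must be installed for the names below to resolve:

    from stevedore import driver

    mgr = driver.DriverManager(
        namespace='stevedore.example.formatter',
        name='simple',
        invoke_on_load=True,
        invoke_args=(80,),   # max_width for the formatter
    )
    print(mgr.driver)  # the invoked plugin object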
@@ -46,7 +46,7 @@ class EnabledExtensionManager(ExtensionManager):
        then ignored
    :type propagate_map_exceptions: bool
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -1,18 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
 import abc
 
 
@@ -1,17 +1,3 @@
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
 import argparse
 
 from stevedore import driver
@@ -1,17 +1,3 @@
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
 import argparse
 
 from stevedore import extension
@@ -1,19 +1,4 @@
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from setuptools import find_packages
-from setuptools import setup
+from setuptools import setup, find_packages
 
 setup(
    name='stevedore-examples',
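These example setup.py files are where the plugin wiring actually happens: the entry_points mapping is what ExtensionManager and DriverManager scan at runtime. A condensed sketch of the registration side (names taken from the stevedore examples; the plugin list is abbreviated):

    from setuptools import setup, find_packages

    setup(
        name='stevedore-examples',
        packages=find_packages(),
        entry_points={
            'stevedore.example.formatter': [
                'simple = stevedore.example.simple:Simple',
            ],
        },
    )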
@@ -1,21 +1,9 @@
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
 from stevedore.example import base
 
 
 class Simple(base.FormatterBase):
-    """A very basic formatter."""
+    """A very basic formatter.
+    """
 
    def format(self, data):
        """Format the data and return unicode text.
@@ -1,18 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2020 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
 import textwrap
 
 from stevedore.example import base
|
||||||
# Copyright (C) 2020 Red Hat, Inc.
|
from setuptools import setup, find_packages
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
# implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
from setuptools import find_packages
|
|
||||||
from setuptools import setup
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='stevedore-examples2',
|
name='stevedore-examples2',
|
||||||
|
|
|
@@ -13,10 +13,11 @@
 """ExtensionManager
 """
 
-import logging
 import operator
+import pkg_resources
+
+import logging
 
-from . import _cache
 from .exception import NoMatches
 
 LOG = logging.getLogger(__name__)
@@ -33,7 +34,7 @@ class Extension(object):
    :param name: The entry point name.
    :type name: str
    :param entry_point: The EntryPoint instance returned by
-        :mod:`entrypoints`.
+        :mod:`pkg_resources`.
    :type entry_point: EntryPoint
    :param plugin: The value returned by entry_point.load()
    :param obj: The object returned by ``plugin(*args, **kwds)`` if the
@@ -47,38 +48,6 @@ class Extension(object):
        self.plugin = plugin
        self.obj = obj
 
-    @property
-    def module_name(self):
-        """The name of the module from which the entry point is loaded.
-
-        :return: A string in 'dotted.module' format.
-        """
-        # NOTE: importlib_metadata from PyPI includes this but the
-        # Python 3.8 standard library does not.
-        match = self.entry_point.pattern.match(self.entry_point.value)
-        return match.group('module')
-
-    @property
-    def extras(self):
-        """The 'extras' settings for the plugin."""
-        # NOTE: The underlying package returns re.Match objects for
-        # some reason. Translate those to the matched strings, which
-        # seem more useful.
-        return [
-            # Python 3.6 returns _sre.SRE_Match objects. Later
-            # versions of python return re.Match objects. Both types
-            # have a 'string' attribute containing the text that
-            # matched the pattern.
-            getattr(e, 'string', e)
-            for e in self.entry_point.extras
-        ]
-
-    @property
-    def attr(self):
-        """The attribute of the module to be loaded."""
-        match = self.entry_point.pattern.match(self.entry_point.value)
-        return match.group('attr')
-
    @property
    def entry_point_target(self):
        """The module and attribute referenced by this extension's entry_point.
@@ -86,7 +55,8 @@ class Extension(object):
        :return: A string representation of the target of the entry point in
            'dotted.module:object' format.
        """
-        return self.entry_point.value
+        return '%s:%s' % (self.entry_point.module_name,
+                          self.entry_point.attrs[0])
 
 
 class ExtensionManager(object):
@@ -110,7 +80,7 @@ class ExtensionManager(object):
        then ignored
    :type propagate_map_exceptions: bool
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -156,7 +126,7 @@ class ExtensionManager(object):
           are logged and then ignored
        :type propagate_map_exceptions: bool
        :param on_load_failure_callback: Callback function that will
-           be called when an entrypoint can not be loaded. The
+           be called when a entrypoint can not be loaded. The
           arguments that will be provided when this is called (when
           an entrypoint fails to load) are (manager, entrypoint,
           exception)
@@ -204,7 +174,7 @@ class ExtensionManager(object):
 
        """
        if self.namespace not in self.ENTRY_POINT_CACHE:
-            eps = list(_cache.get_group_all(self.namespace))
+            eps = list(pkg_resources.iter_entry_points(self.namespace))
            self.ENTRY_POINT_CACHE[self.namespace] = eps
        return self.ENTRY_POINT_CACHE[self.namespace]
 
@@ -252,7 +222,7 @@ class ExtensionManager(object):
                ep.require()
                plugin = ep.resolve()
            else:
-                plugin = ep.load()
+                plugin = ep.load(require=verify_requirements)
            if invoke_on_load:
                obj = plugin(*invoke_args, **invoke_kwds)
            else:
@@ -331,7 +301,8 @@ class ExtensionManager(object):
            LOG.exception(err)
 
    def items(self):
-        """Return an iterator of tuples of the form (name, extension).
+        """
+        Return an iterator of tuples of the form (name, extension).
 
        This is analogous to the Mapping.items() method.
        """
@@ -355,5 +326,6 @@ class ExtensionManager(object):
            return self._extensions_by_name[name]
 
    def __contains__(self, name):
-        """Return true if name is in list of enabled extensions."""
+        """Return true if name is in list of enabled extensions.
+        """
        return any(extension.name == name for extension in self.extensions)
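With the extension.py changes above, plugin discovery goes back through pkg_resources.iter_entry_points(), but the consuming API is unchanged. A sketch of the typical map() pattern (again relying on the example plugins being installed):

    from stevedore import extension

    mgr = extension.ExtensionManager(
        namespace='stevedore.example.formatter',
        invoke_on_load=True,
        invoke_args=(80,),
    )

    def report(ext, data):
        # ext.obj is the invoked plugin; format() yields text chunks
        return list(ext.obj.format(data))

    for chunks in mgr.map(report, {'a': 'A'}):
        print(''.join(chunks))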
@@ -32,7 +32,7 @@ class HookManager(NamedExtensionManager):
        is True.
    :type invoke_kwds: dict
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -46,7 +46,7 @@ class NamedExtensionManager(ExtensionManager):
        then ignored
    :type propagate_map_exceptions: bool
    :param on_load_failure_callback: Callback function that will be called when
-        an entrypoint can not be loaded. The arguments that will be provided
+        a entrypoint can not be loaded. The arguments that will be provided
        when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
@@ -108,7 +108,7 @@ class NamedExtensionManager(ExtensionManager):
           and then ignored
        :type propagate_map_exceptions: bool
        :param on_load_failure_callback: Callback function that will
-           be called when an entrypoint can not be loaded. The
+           be called when a entrypoint can not be loaded. The
           arguments that will be provided when this is called (when
           an entrypoint fails to load) are (manager, entrypoint,
           exception)
@@ -34,32 +34,29 @@ def _simple_list(mgr):
        doc = _get_docstring(ext.plugin) or '\n'
        summary = doc.splitlines()[0].strip()
        yield('* %s -- %s' % (ext.name, summary),
-              ext.module_name)
+              ext.entry_point.module_name)
 
 
 def _detailed_list(mgr, over='', under='-', titlecase=False):
    for name in sorted(mgr.names()):
        ext = mgr[name]
        if over:
-            yield (over * len(ext.name), ext.module_name)
+            yield (over * len(ext.name), ext.entry_point.module_name)
        if titlecase:
-            yield (ext.name.title(), ext.module_name)
+            yield (ext.name.title(), ext.entry_point.module_name)
        else:
-            yield (ext.name, ext.module_name)
+            yield (ext.name, ext.entry_point.module_name)
        if under:
-            yield (under * len(ext.name), ext.module_name)
-        yield ('\n', ext.module_name)
+            yield (under * len(ext.name), ext.entry_point.module_name)
+        yield ('\n', ext.entry_point.module_name)
        doc = _get_docstring(ext.plugin)
        if doc:
-            yield (doc, ext.module_name)
+            yield (doc, ext.entry_point.module_name)
        else:
-            yield (
-                '.. warning:: No documentation found for {} in {}'.format(
-                    ext.name, ext.entry_point_target,
-                ),
-                ext.module_name,
-            )
-        yield ('\n', ext.module_name)
+            yield ('.. warning:: No documentation found in %s'
+                   % ext.entry_point,
+                   ext.entry_point.module_name)
+        yield ('\n', ext.entry_point.module_name)
 
 
 class ListPluginsDirective(rst.Directive):
@@ -82,7 +79,7 @@ class ListPluginsDirective(rst.Directive):
        underline_style = self.options.get('underline-style', '=')
 
        def report_load_failure(mgr, ep, err):
-            LOG.warning(u'Failed to load %s: %s' % (ep.module, err))
+            LOG.warning(u'Failed to load %s: %s' % (ep.module_name, err))
 
        mgr = extension.ExtensionManager(
            namespace,
@@ -1,56 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Tests for stevedore._cache
-"""
-import sys
-
-from unittest import mock
-
-from stevedore import _cache
-from stevedore.tests import utils
-
-
-class TestCache(utils.TestCase):
-
-    def test_disable_caching_executable(self):
-        """Test caching is disabled if python interpreter is located under /tmp
-        directory (Ansible)
-        """
-        with mock.patch.object(sys, 'executable', '/tmp/fake'):
-            sot = _cache.Cache()
-            self.assertTrue(sot._disable_caching)
-
-    def test_disable_caching_file(self):
-        """Test caching is disabled if .disable file is present in target
-        dir
-        """
-        cache_dir = _cache._get_cache_dir()
-
-        with mock.patch('os.path.isfile') as mock_path:
-            mock_path.return_value = True
-            sot = _cache.Cache()
-            mock_path.assert_called_with('%s/.disable' % cache_dir)
-            self.assertTrue(sot._disable_caching)
-
-            mock_path.return_value = False
-            sot = _cache.Cache()
-            self.assertFalse(sot._disable_caching)
-
-    @mock.patch('os.makedirs')
-    @mock.patch('builtins.open')
-    def test__get_data_for_path_no_write(self, mock_open, mock_mkdir):
-        sot = _cache.Cache()
-        sot._disable_caching = True
-        mock_open.side_effect = IOError
-        sot._get_data_for_path('fake')
-        mock_mkdir.assert_not_called()
@@ -10,8 +10,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from stevedore import dispatch
 from stevedore.tests import utils
+from stevedore import dispatch
 
 
 def check_dispatch(ep, *args, **kwds):
@@ -13,12 +13,7 @@
 """Tests for stevedore.extension
 """
 
-try:
-    # For python 3.8 and later
-    import importlib.metadata as importlib_metadata
-except ImportError:
-    # For everyone else
-    import importlib_metadata
+import pkg_resources
 
 from stevedore import driver
 from stevedore import exception
@@ -73,15 +68,13 @@ class TestCallback(utils.TestCase):
        extensions = [
            extension.Extension(
                'backend',
-                importlib_metadata.EntryPoint(
-                    'backend', 'pkg1:driver', 'backend'),
+                pkg_resources.EntryPoint.parse('backend = pkg1:driver'),
                'pkg backend',
                None,
            ),
            extension.Extension(
                'backend',
-                importlib_metadata.EntryPoint(
-                    'backend', 'pkg2:driver', 'backend'),
+                pkg_resources.EntryPoint.parse('backend = pkg2:driver'),
                'pkg backend',
                None,
            ),
@ -16,13 +16,6 @@
|
||||||
import operator
|
import operator
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
try:
|
|
||||||
# For python 3.8 and later
|
|
||||||
import importlib.metadata as importlib_metadata
|
|
||||||
except ImportError:
|
|
||||||
# For everyone else
|
|
||||||
import importlib_metadata
|
|
||||||
|
|
||||||
from stevedore import exception
|
from stevedore import exception
|
||||||
from stevedore import extension
|
from stevedore import extension
|
||||||
from stevedore.tests import utils
|
from stevedore.tests import utils
|
||||||
|
@@ -103,13 +96,13 @@ class TestCallback(utils.TestCase):
 
     def test_use_cache(self):
         # If we insert something into the cache of entry points,
-        # the manager should not have to call into entrypoints
+        # the manager should not have to call into pkg_resources
         # to find the plugins.
         cache = extension.ExtensionManager.ENTRY_POINT_CACHE
         cache['stevedore.test.faux'] = []
-        with mock.patch('stevedore._cache.get_group_all',
+        with mock.patch('pkg_resources.iter_entry_points',
                         side_effect=
-                        AssertionError('called get_group_all')):
+                        AssertionError('called iter_entry_points')):
             em = extension.ExtensionManager('stevedore.test.faux')
             names = em.names()
             self.assertEqual(names, [])
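As the test shows, ENTRY_POINT_CACHE is a plain class-level dict keyed by namespace, so pre-seeding it short-circuits plugin discovery entirely. A usage sketch (the namespace name is illustrative):

    from stevedore import extension

    # Managers created for this namespace now see an empty plugin list
    # instead of scanning installed distributions.
    extension.ExtensionManager.ENTRY_POINT_CACHE['my.namespace'] = []
    em = extension.ExtensionManager('my.namespace')
    assert em.names() == []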
@@ -242,48 +235,9 @@ class TestLoadRequirementsOldSetuptools(utils.TestCase):
     def test_verify_requirements(self):
         self.em._load_one_plugin(self.mock_ep, False, (), {},
                                  verify_requirements=True)
-        self.mock_ep.load.assert_called_once_with()
+        self.mock_ep.load.assert_called_once_with(require=True)
 
     def test_no_verify_requirements(self):
         self.em._load_one_plugin(self.mock_ep, False, (), {},
                                  verify_requirements=False)
-        self.mock_ep.load.assert_called_once_with()
+        self.mock_ep.load.assert_called_once_with(require=False)
-
-
-class TestExtensionProperties(utils.TestCase):
-
-    def setUp(self):
-        self.ext1 = extension.Extension(
-            'name',
-            importlib_metadata.EntryPoint(
-                'name', 'module.name:attribute.name [extra]', 'group_name',
-            ),
-            mock.Mock(),
-            None,
-        )
-        self.ext2 = extension.Extension(
-            'name',
-            importlib_metadata.EntryPoint(
-                'name', 'module:attribute', 'group_name',
-            ),
-            mock.Mock(),
-            None,
-        )
-
-    def test_module_name(self):
-        self.assertEqual('module.name', self.ext1.module_name)
-        self.assertEqual('module', self.ext2.module_name)
-
-    def test_extras(self):
-        self.assertEqual(['[extra]'], self.ext1.extras)
-        self.assertEqual([], self.ext2.extras)
-
-    def test_attr(self):
-        self.assertEqual('attribute.name', self.ext1.attr)
-        self.assertEqual('attribute', self.ext2.attr)
-
-    def test_entry_point_target(self):
-        self.assertEqual('module.name:attribute.name [extra]',
-                         self.ext1.entry_point_target)
-        self.assertEqual('module:attribute',
-                         self.ext2.entry_point_target)
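The deleted TestExtensionProperties class covered stevedore 2.x's convenience properties, which re-derive the module, attribute, and extras from an importlib-style value string. Its assertions, condensed into a worked example (valid only against the 2.x Extension, not the 1.x code restored by this commit):

    import importlib.metadata as importlib_metadata
    from unittest import mock
    from stevedore import extension

    ep = importlib_metadata.EntryPoint(
        'name', 'module.name:attribute.name [extra]', 'group_name')
    ext = extension.Extension('name', ep, mock.Mock(), None)

    ext.module_name         # 'module.name'      (text before the colon)
    ext.attr                # 'attribute.name'   (text after the colon)
    ext.extras              # ['[extra]']        (bracketed suffix, if any)
    ext.entry_point_target  # the full value string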
@@ -12,26 +12,24 @@
 """Tests for the sphinx extension
 """
 
-try:
-    # For python 3.8 and later
-    import importlib.metadata as importlib_metadata
-except ImportError:
-    # For everyone else
-    import importlib_metadata
+from unittest import mock
 
 from stevedore import extension
 from stevedore import sphinxext
 from stevedore.tests import utils
 
+import pkg_resources
+
 
 def _make_ext(name, docstring):
     def inner():
         pass
 
     inner.__doc__ = docstring
-    m1 = importlib_metadata.EntryPoint(
-        name, '{}_module:{}'.format(name, name), 'group',
-    )
+    m1 = mock.Mock(spec=pkg_resources.EntryPoint)
+    m1.module_name = '%s_module' % name
+    s = mock.Mock(return_value='ENTRY_POINT(%s)' % name)
+    m1.__str__ = s
    return extension.Extension(name, m1, inner, None)
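The Mock-based replacement works because the 1.x sphinxext evidently interpolates str(entry_point) into its generated warning text, so the test pins __str__ to a recognizable token; that token reappears in the expected output in the next hunk. Assigning magic methods on a Mock this way is the documented unittest.mock pattern:

    from unittest import mock

    m1 = mock.Mock()
    m1.__str__ = mock.Mock(return_value='ENTRY_POINT(nodoc)')
    str(m1)  # 'ENTRY_POINT(nodoc)'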
@@ -113,8 +111,7 @@ class TestSphinxExt(utils.TestCase):
             ('nodoc', 'nodoc_module'),
             ('-----', 'nodoc_module'),
             ('\n', 'nodoc_module'),
-            (('.. warning:: No documentation found for '
-              'nodoc in nodoc_module:nodoc'),
+            ('.. warning:: No documentation found in ENTRY_POINT(nodoc)',
              'nodoc_module'),
             ('\n', 'nodoc_module'),
         ],
@@ -10,20 +10,15 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from unittest.mock import Mock
-from unittest.mock import sentinel
+from unittest.mock import Mock, sentinel
 
-from stevedore.dispatch import DispatchExtensionManager
-from stevedore.dispatch import NameDispatchExtensionManager
+from stevedore import (ExtensionManager, NamedExtensionManager, HookManager,
+                       DriverManager, EnabledExtensionManager)
+from stevedore.dispatch import (DispatchExtensionManager,
+                                NameDispatchExtensionManager)
 from stevedore.extension import Extension
 from stevedore.tests import utils
 
-from stevedore import DriverManager
-from stevedore import EnabledExtensionManager
-from stevedore import ExtensionManager
-from stevedore import HookManager
-from stevedore import NamedExtensionManager
 
 
 test_extension = Extension('test_extension', None, None, None)
 test_extension2 = Extension('another_one', None, None, None)
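These module-level fixtures are bare Extension records with no entry point or plugin attached; they are typically handed to the managers' make_test_instance() hook, which builds a working manager without consulting any entry points. A usage sketch:

    from stevedore import ExtensionManager
    from stevedore.extension import Extension

    test_extension = Extension('test_extension', None, None, None)
    em = ExtensionManager.make_test_instance([test_extension])
    em.names()  # ['test_extension']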