Mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-14 18:47:09 -07:00)
Updates vendored subliminal to 2.1.0
Updates rarfile to 3.1
Updates stevedore to 3.5.0
Updates appdirs to 1.4.4
Updates click to 8.1.3
Updates decorator to 5.1.1
Updates dogpile.cache to 1.1.8
Updates pbr to 5.11.0
Updates pysrt to 1.1.2
Updates pytz to 2022.6
Adds importlib-metadata version 3.1.1
Adds typing-extensions version 4.1.1
Adds zipp version 3.11.0
This commit is contained in:
parent d8da02cb69
commit f05b09f349
694 changed files with 16621 additions and 11056 deletions
libs/common/dogpile/cache/__init__.py (vendored, 6 lines changed)

@@ -1,4 +1,6 @@
-from .region import CacheRegion, register_backend, make_region  # noqa
-from .. import __version__  # noqa
+from .region import CacheRegion  # noqa
+from .region import make_region  # noqa
+from .region import register_backend  # noqa
+
+# backwards compat
+from .. import __version__  # noqa
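The reorganized imports keep the module's public API intact; a minimal usage sketch of the vendored library (the region setup and cached function are illustrative)::

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    @region.cache_on_arguments()
    def lookup(name):
        # computed once per argument set, then served from the memory backend
        return name.upper()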
libs/common/dogpile/cache/api.py (vendored, 446 lines changed)

@@ -1,14 +1,22 @@
-import operator
-from ..util.compat import py3k
+import abc
+import pickle
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Mapping
+from typing import NamedTuple
+from typing import Optional
+from typing import Sequence
+from typing import Union


-class NoValue(object):
+class NoValue:
     """Describe a missing cache value.

-    The :attr:`.NO_VALUE` module global
-    should be used.
+    The :data:`.NO_VALUE` constant should be used.

     """

     @property
     def payload(self):
         return self
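``NoValue`` stays falsy across the rewrite (a plain ``__bool__`` replaces the py2/py3 ``__nonzero__`` branch), so miss checks keep working; a short sketch, with the region setup illustrative::

    from dogpile.cache import make_region
    from dogpile.cache.api import NO_VALUE

    region = make_region().configure("dogpile.cache.memory")

    value = region.get("missing-key")  # returns NO_VALUE on a miss
    if value is NO_VALUE:              # the identity check is the documented idiom
        value = "recomputed"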
@@ -18,49 +26,125 @@ class NoValue(object):
         fill another cache key.

         """
-        return '<dogpile.cache.api.NoValue object>'
+        return "<dogpile.cache.api.NoValue object>"

-    if py3k:
-        def __bool__(self):  # pragma NO COVERAGE
-            return False
-    else:
-        def __nonzero__(self):  # pragma NO COVERAGE
-            return False
+    def __bool__(self):  # pragma NO COVERAGE
+        return False


 NO_VALUE = NoValue()
 """Value returned from ``get()`` that describes
 a key not present."""

+MetaDataType = Mapping[str, Any]

-class CachedValue(tuple):

+KeyType = str
+"""A cache key."""
+
+ValuePayload = Any
+"""An object to be placed in the cache against a key."""
+
+
+KeyManglerType = Callable[[KeyType], KeyType]
+Serializer = Callable[[ValuePayload], bytes]
+Deserializer = Callable[[bytes], ValuePayload]
+
+
+class CacheMutex(abc.ABC):
+    """Describes a mutexing object with acquire and release methods.
+
+    This is an abstract base class; any object that has acquire/release
+    methods may be used.
+
+    .. versionadded:: 1.1
+
+
+    .. seealso::
+
+        :meth:`.CacheBackend.get_mutex` - the backend method that optionally
+        returns this locking object.
+
+    """
+
+    @abc.abstractmethod
+    def acquire(self, wait: bool = True) -> bool:
+        """Acquire the mutex.
+
+        :param wait: if True, block until available, else return True/False
+         immediately.
+
+        :return: True if the lock succeeded.
+
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def release(self) -> None:
+        """Release the mutex."""
+
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def locked(self) -> bool:
+        """Check if the mutex was acquired.
+
+        :return: true if the lock is acquired.
+
+        .. versionadded:: 1.1.2
+
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        return hasattr(C, "acquire") and hasattr(C, "release")
+
+
+class CachedValue(NamedTuple):
     """Represent a value stored in the cache.

     :class:`.CachedValue` is a two-tuple of
     ``(payload, metadata)``, where ``metadata``
     is dogpile.cache's tracking information (
-    currently the creation time). The metadata
-    and tuple structure is pickleable, if
-    the backend requires serialization.
+    currently the creation time).

     """
-    payload = property(operator.itemgetter(0))
-    """Named accessor for the payload."""
-
-    metadata = property(operator.itemgetter(1))
-    """Named accessor for the dogpile.cache metadata dictionary."""
+
+    payload: ValuePayload

-    def __new__(cls, payload, metadata):
-        return tuple.__new__(cls, (payload, metadata))
-
-    def __reduce__(self):
-        return CachedValue, (self.payload, self.metadata)
+    metadata: MetaDataType


-class CacheBackend(object):
-    """Base class for backend implementations."""
+CacheReturnType = Union[CachedValue, NoValue]
+"""The non-serialized form of what may be returned from a backend
+get method.

-    key_mangler = None
+"""
+
+SerializedReturnType = Union[bytes, NoValue]
+"""the serialized form of what may be returned from a backend get method."""
+
+BackendFormatted = Union[CacheReturnType, SerializedReturnType]
+"""Describes the type returned from the :meth:`.CacheBackend.get` method."""
+
+BackendSetType = Union[CachedValue, bytes]
+"""Describes the value argument passed to the :meth:`.CacheBackend.set`
+method."""
+
+BackendArguments = Mapping[str, Any]
+
+
+class CacheBackend:
+    """Base class for backend implementations.
+
+    Backends which set and get Python object values should subclass this
+    backend. For backends in which the value that's stored is ultimately
+    a stream of bytes, the :class:`.BytesBackend` should be used.
+
+    """
+
+    key_mangler: Optional[Callable[[KeyType], KeyType]] = None
     """Key mangling function.

     May be None, or otherwise declared
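Because ``CachedValue`` is now a ``NamedTuple``, the two-tuple shape, the named accessors, and pickling all come for free, which is what the removed ``property``/``__new__``/``__reduce__`` machinery provided by hand. A quick sketch (the metadata keys shown follow dogpile's creation-time convention and are illustrative)::

    import pickle
    import time

    from dogpile.cache.api import CachedValue

    cv = CachedValue("payload", {"ct": time.time(), "v": 1})
    assert cv.payload == cv[0]
    assert cv.metadata is cv[1]
    # NamedTuples pickle cleanly, which __reduce__ used to guarantee
    assert pickle.loads(pickle.dumps(cv)) == cv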
@@ -68,7 +152,23 @@ class CacheBackend(object):

     """

-    def __init__(self, arguments):  # pragma NO COVERAGE
+    serializer: Union[None, Serializer] = None
+    """Serializer function that will be used by default if not overridden
+    by the region.
+
+    .. versionadded:: 1.1
+
+    """
+
+    deserializer: Union[None, Deserializer] = None
+    """deserializer function that will be used by default if not overridden
+    by the region.
+
+    .. versionadded:: 1.1
+
+    """
+
+    def __init__(self, arguments: BackendArguments):  # pragma NO COVERAGE
         """Construct a new :class:`.CacheBackend`.

         Subclasses should override this to

@@ -91,10 +191,10 @@ class CacheBackend(object):
             )
         )

-    def has_lock_timeout(self):
+    def has_lock_timeout(self) -> bool:
         return False

-    def get_mutex(self, key):
+    def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
         """Return an optional mutexing object for the given key.

         This object need only provide an ``acquire()``

@@ -127,48 +227,141 @@ class CacheBackend(object):
         """
         return None

-    def get(self, key):  # pragma NO COVERAGE
-        """Retrieve a value from the cache.
+    def get(self, key: KeyType) -> BackendFormatted:  # pragma NO COVERAGE
+        """Retrieve an optionally serialized value from the cache.

-        The returned value should be an instance of
-        :class:`.CachedValue`, or ``NO_VALUE`` if
-        not present.
+        :param key: String key that was passed to the :meth:`.CacheRegion.get`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :return: the Python object that corresponds to
+         what was established via the :meth:`.CacheBackend.set` method,
+         or the :data:`.NO_VALUE` constant if not present.
+
+        If a serializer is in use, this method will only be called if the
+        :meth:`.CacheBackend.get_serialized` method is not overridden.

         """
         raise NotImplementedError()

-    def get_multi(self, keys):  # pragma NO COVERAGE
-        """Retrieve multiple values from the cache.
+    def get_multi(
+        self, keys: Sequence[KeyType]
+    ) -> Sequence[BackendFormatted]:  # pragma NO COVERAGE
+        """Retrieve multiple optionally serialized values from the cache.

-        The returned value should be a list, corresponding
-        to the list of keys given.
+        :param keys: sequence of string keys that was passed to the
+         :meth:`.CacheRegion.get_multi` method, which will also be processed
+         by the "key mangling" function if one was present.
+
+        :return: a list of values as would be returned
+         individually via the :meth:`.CacheBackend.get` method, corresponding
+         to the list of keys given.
+
+        If a serializer is in use, this method will only be called if the
+        :meth:`.CacheBackend.get_serialized_multi` method is not overridden.

         .. versionadded:: 0.5.0

         """
         raise NotImplementedError()

-    def set(self, key, value):  # pragma NO COVERAGE
-        """Set a value in the cache.
+    def get_serialized(self, key: KeyType) -> SerializedReturnType:
+        """Retrieve a serialized value from the cache.

-        The key will be whatever was passed
-        to the registry, processed by the
-        "key mangling" function, if any.
-        The value will always be an instance
-        of :class:`.CachedValue`.
+        :param key: String key that was passed to the :meth:`.CacheRegion.get`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :return: a bytes object, or :data:`.NO_VALUE`
+         constant if not present.
+
+        The default implementation of this method for :class:`.CacheBackend`
+        returns the value of the :meth:`.CacheBackend.get` method.
+
+        .. versionadded:: 1.1
+
+        .. seealso::
+
+            :class:`.BytesBackend`
+
+        """
+        return cast(SerializedReturnType, self.get(key))
+
+    def get_serialized_multi(
+        self, keys: Sequence[KeyType]
+    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
+        """Retrieve multiple serialized values from the cache.
+
+        :param keys: sequence of string keys that was passed to the
+         :meth:`.CacheRegion.get_multi` method, which will also be processed
+         by the "key mangling" function if one was present.
+
+        :return: list of bytes objects
+
+        The default implementation of this method for :class:`.CacheBackend`
+        returns the value of the :meth:`.CacheBackend.get_multi` method.
+
+        .. versionadded:: 1.1
+
+        .. seealso::
+
+            :class:`.BytesBackend`
+
+        """
+        return cast(Sequence[SerializedReturnType], self.get_multi(keys))
+
+    def set(
+        self, key: KeyType, value: BackendSetType
+    ) -> None:  # pragma NO COVERAGE
+        """Set an optionally serialized value in the cache.
+
+        :param key: String key that was passed to the :meth:`.CacheRegion.set`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :param value: The optionally serialized :class:`.CachedValue` object.
+         May be an instance of :class:`.CachedValue` or a bytes object
+         depending on if a serializer is in use with the region and if the
+         :meth:`.CacheBackend.set_serialized` method is not overridden.
+
+        .. seealso::
+
+            :meth:`.CacheBackend.set_serialized`

         """
         raise NotImplementedError()

-    def set_multi(self, mapping):  # pragma NO COVERAGE
+    def set_serialized(
+        self, key: KeyType, value: bytes
+    ) -> None:  # pragma NO COVERAGE
+        """Set a serialized value in the cache.
+
+        :param key: String key that was passed to the :meth:`.CacheRegion.set`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :param value: a bytes object to be stored.
+
+        The default implementation of this method for :class:`.CacheBackend`
+        calls upon the :meth:`.CacheBackend.set` method.
+
+        .. versionadded:: 1.1
+
+        .. seealso::
+
+            :class:`.BytesBackend`
+
+        """
+        self.set(key, value)
+
+    def set_multi(
+        self, mapping: Mapping[KeyType, BackendSetType]
+    ) -> None:  # pragma NO COVERAGE
         """Set multiple values in the cache.

-        ``mapping`` is a dict in which
-        the key will be whatever was passed
-        to the registry, processed by the
-        "key mangling" function, if any.
-        The value will always be an instance
-        of :class:`.CachedValue`.
+        :param mapping: a dict in which the key will be whatever was passed to
+         the :meth:`.CacheRegion.set_multi` method, processed by the "key
+         mangling" function, if any.
+
+        When implementing a new :class:`.CacheBackend` or customizing via
+        :class:`.ProxyBackend`, be aware that when this method is invoked by
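The ``serializer``/``deserializer`` hooks introduced here let pickle be swapped out per backend or per region; a hedged sketch using JSON at the region level (the file path is illustrative, and payloads must then be JSON-serializable)::

    import json

    from dogpile.cache import make_region

    region = make_region(
        serializer=lambda payload: json.dumps(payload).encode("utf-8"),
        deserializer=lambda data: json.loads(data.decode("utf-8")),
    ).configure(
        "dogpile.cache.dbm",
        arguments={"filename": "/tmp/example.dbm"},
    )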
@@ -178,17 +371,52 @@ class CacheBackend(object):
         -- that will have the undesirable effect of modifying the returned
         values as well.

+        If a serializer is in use, this method will only be called if the
+        :meth:`.CacheBackend.set_serialized_multi` method is not overridden.
+
         .. versionadded:: 0.5.0

         """
         raise NotImplementedError()

-    def delete(self, key):  # pragma NO COVERAGE
+    def set_serialized_multi(
+        self, mapping: Mapping[KeyType, bytes]
+    ) -> None:  # pragma NO COVERAGE
+        """Set multiple serialized values in the cache.
+
+        :param mapping: a dict in which the key will be whatever was passed to
+         the :meth:`.CacheRegion.set_multi` method, processed by the "key
+         mangling" function, if any.
+
+        When implementing a new :class:`.CacheBackend` or customizing via
+        :class:`.ProxyBackend`, be aware that when this method is invoked by
+        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
+        same ones returned to the upstream caller. If the subclass alters the
+        values in any way, it must not do so 'in-place' on the ``mapping`` dict
+        -- that will have the undesirable effect of modifying the returned
+        values as well.
+
+        .. versionadded:: 1.1
+
+        The default implementation of this method for :class:`.CacheBackend`
+        calls upon the :meth:`.CacheBackend.set_multi` method.
+
+        .. seealso::
+
+            :class:`.BytesBackend`
+
+        """
+        self.set_multi(mapping)
+
+    def delete(self, key: KeyType) -> None:  # pragma NO COVERAGE
         """Delete a value from the cache.

-        The key will be whatever was passed
-        to the registry, processed by the
-        "key mangling" function, if any.
+        :param key: String key that was passed to the
+         :meth:`.CacheRegion.delete`
+         method, which will also be processed by the "key mangling" function
+         if one was present.

         The behavior here should be idempotent,
         that is, can be called any number of times

@@ -197,12 +425,14 @@ class CacheBackend(object):
         """
         raise NotImplementedError()

-    def delete_multi(self, keys):  # pragma NO COVERAGE
+    def delete_multi(
+        self, keys: Sequence[KeyType]
+    ) -> None:  # pragma NO COVERAGE
         """Delete multiple values from the cache.

-        The key will be whatever was passed
-        to the registry, processed by the
-        "key mangling" function, if any.
+        :param keys: sequence of string keys that was passed to the
+         :meth:`.CacheRegion.delete_multi` method, which will also be processed
+         by the "key mangling" function if one was present.

         The behavior here should be idempotent,
         that is, can be called any number of times

@@ -213,3 +443,95 @@ class CacheBackend(object):

         """
         raise NotImplementedError()
+
+
+class DefaultSerialization:
+    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
+        pickle.dumps
+    )
+    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
+        pickle.loads
+    )
+
+
+class BytesBackend(DefaultSerialization, CacheBackend):
+    """A cache backend that receives and returns series of bytes.
+
+    This backend only supports the "serialized" form of values; subclasses
+    should implement :meth:`.BytesBackend.get_serialized`,
+    :meth:`.BytesBackend.get_serialized_multi`,
+    :meth:`.BytesBackend.set_serialized`,
+    :meth:`.BytesBackend.set_serialized_multi`.
+
+    .. versionadded:: 1.1
+
+    """
+
+    def get_serialized(self, key: KeyType) -> SerializedReturnType:
+        """Retrieve a serialized value from the cache.
+
+        :param key: String key that was passed to the :meth:`.CacheRegion.get`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :return: a bytes object, or :data:`.NO_VALUE`
+         constant if not present.
+
+        .. versionadded:: 1.1
+
+        """
+        raise NotImplementedError()
+
+    def get_serialized_multi(
+        self, keys: Sequence[KeyType]
+    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
+        """Retrieve multiple serialized values from the cache.
+
+        :param keys: sequence of string keys that was passed to the
+         :meth:`.CacheRegion.get_multi` method, which will also be processed
+         by the "key mangling" function if one was present.
+
+        :return: list of bytes objects
+
+        .. versionadded:: 1.1
+
+        """
+        raise NotImplementedError()
+
+    def set_serialized(
+        self, key: KeyType, value: bytes
+    ) -> None:  # pragma NO COVERAGE
+        """Set a serialized value in the cache.
+
+        :param key: String key that was passed to the :meth:`.CacheRegion.set`
+         method, which will also be processed by the "key mangling" function
+         if one was present.
+
+        :param value: a bytes object to be stored.
+
+        .. versionadded:: 1.1
+
+        """
+        raise NotImplementedError()
+
+    def set_serialized_multi(
+        self, mapping: Mapping[KeyType, bytes]
+    ) -> None:  # pragma NO COVERAGE
+        """Set multiple serialized values in the cache.
+
+        :param mapping: a dict in which the key will be whatever was passed to
+         the :meth:`.CacheRegion.set_multi` method, processed by the "key
+         mangling" function, if any.
+
+        When implementing a new :class:`.CacheBackend` or customizing via
+        :class:`.ProxyBackend`, be aware that when this method is invoked by
+        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
+        same ones returned to the upstream caller. If the subclass alters the
+        values in any way, it must not do so 'in-place' on the ``mapping`` dict
+        -- that will have the undesirable effect of modifying the returned
+        values as well.
+
+        .. versionadded:: 1.1
+
+        """
+        raise NotImplementedError()
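``BytesBackend`` is the intended base for byte-oriented stores; a minimal in-memory sketch of the interface (the class and its dict are illustrative, not part of the library)::

    from dogpile.cache.api import NO_VALUE, BytesBackend

    class DictBytesBackend(BytesBackend):
        # hypothetical example backend: stores raw bytes in a plain dict
        def __init__(self, arguments):
            self._store = {}

        def get_serialized(self, key):
            return self._store.get(key, NO_VALUE)

        def get_serialized_multi(self, keys):
            return [self.get_serialized(key) for key in keys]

        def set_serialized(self, key, value):
            self._store[key] = value

        def set_serialized_multi(self, mapping):
            self._store.update(mapping)

        def delete(self, key):
            self._store.pop(key, None)

        def delete_multi(self, keys):
            for key in keys:
                self._store.pop(key, None)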
libs/common/dogpile/cache/backends/__init__.py (vendored, 51 lines changed)

@@ -1,22 +1,47 @@
-from dogpile.cache.region import register_backend
+from ...util import PluginLoader
+
+_backend_loader = PluginLoader("dogpile.cache")
+register_backend = _backend_loader.register

 register_backend(
-    "dogpile.cache.null", "dogpile.cache.backends.null", "NullBackend")
+    "dogpile.cache.null", "dogpile.cache.backends.null", "NullBackend"
+)
 register_backend(
-    "dogpile.cache.dbm", "dogpile.cache.backends.file", "DBMBackend")
+    "dogpile.cache.dbm", "dogpile.cache.backends.file", "DBMBackend"
+)
 register_backend(
-    "dogpile.cache.pylibmc", "dogpile.cache.backends.memcached",
-    "PylibmcBackend")
+    "dogpile.cache.pylibmc",
+    "dogpile.cache.backends.memcached",
+    "PylibmcBackend",
+)
 register_backend(
-    "dogpile.cache.bmemcached", "dogpile.cache.backends.memcached",
-    "BMemcachedBackend")
+    "dogpile.cache.bmemcached",
+    "dogpile.cache.backends.memcached",
+    "BMemcachedBackend",
+)
 register_backend(
-    "dogpile.cache.memcached", "dogpile.cache.backends.memcached",
-    "MemcachedBackend")
+    "dogpile.cache.memcached",
+    "dogpile.cache.backends.memcached",
+    "MemcachedBackend",
+)
 register_backend(
-    "dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend")
+    "dogpile.cache.pymemcache",
+    "dogpile.cache.backends.memcached",
+    "PyMemcacheBackend",
+)
 register_backend(
-    "dogpile.cache.memory_pickle", "dogpile.cache.backends.memory",
-    "MemoryPickleBackend")
+    "dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend"
+)
 register_backend(
-    "dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend")
+    "dogpile.cache.memory_pickle",
+    "dogpile.cache.backends.memory",
+    "MemoryPickleBackend",
+)
+register_backend(
+    "dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend"
+)
+register_backend(
+    "dogpile.cache.redis_sentinel",
+    "dogpile.cache.backends.redis",
+    "RedisSentinelBackend",
+)
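``register_backend`` is also the public hook for third-party backends; registering the hypothetical ``DictBytesBackend`` sketched above would look like this (module path and name are illustrative)::

    from dogpile.cache import register_backend

    register_backend(
        "myapp.dict_bytes",       # name later passed to configure()
        "myapp.cache_backends",   # hypothetical module containing the class
        "DictBytesBackend",
    )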
libs/common/dogpile/cache/backends/file.py (vendored, 96 lines changed)

@@ -7,16 +7,20 @@ Provides backends that deal with local filesystem access.
 """

-from __future__ import with_statement
-from ..api import CacheBackend, NO_VALUE
-
 from contextlib import contextmanager
-from ...util import compat
-from ... import util
+import dbm
 import os
+import threading

-__all__ = 'DBMBackend', 'FileLock', 'AbstractFileLock'
+from ..api import BytesBackend
+from ..api import NO_VALUE
+from ... import util
+
+__all__ = ["DBMBackend", "FileLock", "AbstractFileLock"]


-class DBMBackend(CacheBackend):
+class DBMBackend(BytesBackend):
     """A file-backend using a dbm file to store keys.

     Basic usage::

@@ -134,28 +138,25 @@ class DBMBackend(CacheBackend):

     """

     def __init__(self, arguments):
         self.filename = os.path.abspath(
-            os.path.normpath(arguments['filename'])
+            os.path.normpath(arguments["filename"])
         )
         dir_, filename = os.path.split(self.filename)

         self.lock_factory = arguments.get("lock_factory", FileLock)
         self._rw_lock = self._init_lock(
-            arguments.get('rw_lockfile'),
-            ".rw.lock", dir_, filename)
+            arguments.get("rw_lockfile"), ".rw.lock", dir_, filename
+        )
         self._dogpile_lock = self._init_lock(
-            arguments.get('dogpile_lockfile'),
+            arguments.get("dogpile_lockfile"),
             ".dogpile.lock",
-            dir_, filename,
-            util.KeyReentrantMutex.factory)
+            dir_,
+            filename,
+            util.KeyReentrantMutex.factory,
+        )

-        # TODO: make this configurable
-        if compat.py3k:
-            import dbm
-        else:
-            import anydbm as dbm
-        self.dbmmodule = dbm
         self._init_dbm_file()

     def _init_lock(self, argument, suffix, basedir, basefile, wrapper=None):

@@ -163,9 +164,8 @@ class DBMBackend(CacheBackend):
             lock = self.lock_factory(os.path.join(basedir, basefile + suffix))
         elif argument is not False:
             lock = self.lock_factory(
-                os.path.abspath(
-                    os.path.normpath(argument)
-                ))
+                os.path.abspath(os.path.normpath(argument))
+            )
         else:
             return None
         if wrapper:

@@ -175,12 +175,12 @@ class DBMBackend(CacheBackend):
     def _init_dbm_file(self):
         exists = os.access(self.filename, os.F_OK)
         if not exists:
-            for ext in ('db', 'dat', 'pag', 'dir'):
+            for ext in ("db", "dat", "pag", "dir"):
                 if os.access(self.filename + os.extsep + ext, os.F_OK):
                     exists = True
                     break
         if not exists:
-            fh = self.dbmmodule.open(self.filename, 'c')
+            fh = dbm.open(self.filename, "c")
             fh.close()

     def get_mutex(self, key):

@@ -210,57 +210,50 @@ class DBMBackend(CacheBackend):
     @contextmanager
     def _dbm_file(self, write):
         with self._use_rw_lock(write):
-            dbm = self.dbmmodule.open(
-                self.filename,
-                "w" if write else "r")
-            yield dbm
-            dbm.close()
+            with dbm.open(self.filename, "w" if write else "r") as dbm_obj:
+                yield dbm_obj

-    def get(self, key):
-        with self._dbm_file(False) as dbm:
-            if hasattr(dbm, 'get'):
-                value = dbm.get(key, NO_VALUE)
+    def get_serialized(self, key):
+        with self._dbm_file(False) as dbm_obj:
+            if hasattr(dbm_obj, "get"):
+                value = dbm_obj.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
-                    value = dbm[key]
+                    value = dbm_obj[key]
                 except KeyError:
                     value = NO_VALUE
-            if value is not NO_VALUE:
-                value = compat.pickle.loads(value)
             return value

-    def get_multi(self, keys):
-        return [self.get(key) for key in keys]
+    def get_serialized_multi(self, keys):
+        return [self.get_serialized(key) for key in keys]

-    def set(self, key, value):
-        with self._dbm_file(True) as dbm:
-            dbm[key] = compat.pickle.dumps(value,
-                                           compat.pickle.HIGHEST_PROTOCOL)
+    def set_serialized(self, key, value):
+        with self._dbm_file(True) as dbm_obj:
+            dbm_obj[key] = value

-    def set_multi(self, mapping):
-        with self._dbm_file(True) as dbm:
+    def set_serialized_multi(self, mapping):
+        with self._dbm_file(True) as dbm_obj:
             for key, value in mapping.items():
-                dbm[key] = compat.pickle.dumps(value,
-                                               compat.pickle.HIGHEST_PROTOCOL)
+                dbm_obj[key] = value

     def delete(self, key):
-        with self._dbm_file(True) as dbm:
+        with self._dbm_file(True) as dbm_obj:
             try:
-                del dbm[key]
+                del dbm_obj[key]
             except KeyError:
                 pass

     def delete_multi(self, keys):
-        with self._dbm_file(True) as dbm:
+        with self._dbm_file(True) as dbm_obj:
             for key in keys:
                 try:
-                    del dbm[key]
+                    del dbm_obj[key]
                 except KeyError:
                     pass


-class AbstractFileLock(object):
+class AbstractFileLock:
     """Coordinate read/write access to a file.

     typically is a file-based lock but doesn't necessarily have to be.

@@ -392,17 +385,18 @@ class FileLock(AbstractFileLock):
     """

     def __init__(self, filename):
-        self._filedescriptor = compat.threading.local()
+        self._filedescriptor = threading.local()
         self.filename = filename

     @util.memoized_property
     def _module(self):
         import fcntl
+
         return fcntl

     @property
     def is_open(self):
-        return hasattr(self._filedescriptor, 'fileno')
+        return hasattr(self._filedescriptor, "fileno")

     def acquire_read_lock(self, wait):
         return self._acquire(wait, os.O_RDONLY, self._module.LOCK_SH)
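With ``DBMBackend`` now a ``BytesBackend``, pickling moves out of the backend and into the region's default serializer, but configuration is unchanged; a sketch (the path is illustrative)::

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.dbm",
        expiration_time=3600,
        arguments={"filename": "/var/tmp/example-cache.dbm"},
    )

    region.set("greeting", {"msg": "hello"})  # serialized by the region
    assert region.get("greeting")["msg"] == "hello"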
libs/common/dogpile/cache/backends/memcached.py (vendored, 389 lines changed)

@@ -6,23 +6,43 @@ Provides backends for talking to `memcached <http://memcached.org>`_.
 """

-from ..api import CacheBackend, NO_VALUE
-from ...util import compat
-from ... import util
 import random
+import threading
 import time
+import typing
+from typing import Any
+from typing import Mapping
+import warnings

-__all__ = 'GenericMemcachedBackend', 'MemcachedBackend',\
-    'PylibmcBackend', 'BMemcachedBackend', 'MemcachedLock'
+from ..api import CacheBackend
+from ..api import NO_VALUE
+from ... import util
+
+
+if typing.TYPE_CHECKING:
+    import bmemcached
+    import memcache
+    import pylibmc
+    import pymemcache
+else:
+    # delayed import
+    bmemcached = None  # noqa F811
+    memcache = None  # noqa F811
+    pylibmc = None  # noqa F811
+    pymemcache = None  # noqa F811
+
+__all__ = (
+    "GenericMemcachedBackend",
+    "MemcachedBackend",
+    "PylibmcBackend",
+    "PyMemcacheBackend",
+    "BMemcachedBackend",
+    "MemcachedLock",
+)


 class MemcachedLock(object):
-    """Simple distributed lock using memcached.
-
-    This is an adaptation of the lock featured at
-    http://amix.dk/blog/post/19386
-
-    """
+    """Simple distributed lock using memcached."""

     def __init__(self, client_fn, key, timeout=0):
         self.client_fn = client_fn

@@ -38,11 +58,15 @@ class MemcachedLock(object):
         elif not wait:
             return False
         else:
-            sleep_time = (((i + 1) * random.random()) + 2 ** i) / 2.5
+            sleep_time = (((i + 1) * random.random()) + 2**i) / 2.5
             time.sleep(sleep_time)
             if i < 15:
                 i += 1

+    def locked(self):
+        client = self.client_fn()
+        return client.get(self.key) is not None
+
     def release(self):
         client = self.client_fn()
         client.delete(self.key)

@@ -100,10 +124,17 @@ class GenericMemcachedBackend(CacheBackend):

     """

-    set_arguments = {}
+    set_arguments: Mapping[str, Any] = {}
     """Additional arguments which will be passed
     to the :meth:`set` method."""

+    # No need to override serializer, as all the memcached libraries
+    # handle that themselves. Still, we support customizing the
+    # serializer/deserializer to use a better default pickle protocol
+    # or a completely different serialization mechanism
+    serializer = None
+    deserializer = None
+
     def __init__(self, arguments):
         self._imports()
         # using a plain threading.local here.  threading.local

@@ -111,11 +142,10 @@ class GenericMemcachedBackend(CacheBackend):
         # so the idea is that this is superior to pylibmc's
         # own ThreadMappedPool which doesn't handle this
         # automatically.
-        self.url = util.to_list(arguments['url'])
-        self.distributed_lock = arguments.get('distributed_lock', False)
-        self.lock_timeout = arguments.get('lock_timeout', 0)
-        self.memcached_expire_time = arguments.get(
-            'memcached_expire_time', 0)
+        self.url = util.to_list(arguments["url"])
+        self.distributed_lock = arguments.get("distributed_lock", False)
+        self.lock_timeout = arguments.get("lock_timeout", 0)
+        self.memcached_expire_time = arguments.get("memcached_expire_time", 0)

     def has_lock_timeout(self):
         return self.lock_timeout != 0

@@ -132,7 +162,7 @@ class GenericMemcachedBackend(CacheBackend):
     def _clients(self):
         backend = self

-        class ClientPool(compat.threading.local):
+        class ClientPool(threading.local):
             def __init__(self):
                 self.memcached = backend._create_client()

@@ -152,8 +182,9 @@ class GenericMemcachedBackend(CacheBackend):

     def get_mutex(self, key):
         if self.distributed_lock:
-            return MemcachedLock(lambda: self.client, key,
-                                 timeout=self.lock_timeout)
+            return MemcachedLock(
+                lambda: self.client, key, timeout=self.lock_timeout
+            )
         else:
             return None

@@ -166,23 +197,18 @@ class GenericMemcachedBackend(CacheBackend):

     def get_multi(self, keys):
         values = self.client.get_multi(keys)

         return [
-            NO_VALUE if key not in values
-            else values[key] for key in keys
+            NO_VALUE if val is None else val
+            for val in [values.get(key, NO_VALUE) for key in keys]
         ]

     def set(self, key, value):
-        self.client.set(
-            key,
-            value,
-            **self.set_arguments
-        )
+        self.client.set(key, value, **self.set_arguments)

     def set_multi(self, mapping):
-        self.client.set_multi(
-            mapping,
-            **self.set_arguments
-        )
+        mapping = {key: value for key, value in mapping.items()}
+        self.client.set_multi(mapping, **self.set_arguments)

     def delete(self, key):
         self.client.delete(key)

@@ -191,24 +217,23 @@ class GenericMemcachedBackend(CacheBackend):
         self.client.delete_multi(keys)


-class MemcacheArgs(object):
+class MemcacheArgs(GenericMemcachedBackend):
     """Mixin which provides support for the 'time' argument to set(),
     'min_compress_len' to other methods.

     """

     def __init__(self, arguments):
-        self.min_compress_len = arguments.get('min_compress_len', 0)
+        self.min_compress_len = arguments.get("min_compress_len", 0)

         self.set_arguments = {}
         if "memcached_expire_time" in arguments:
             self.set_arguments["time"] = arguments["memcached_expire_time"]
         if "min_compress_len" in arguments:
-            self.set_arguments["min_compress_len"] = \
-                arguments["min_compress_len"]
+            self.set_arguments["min_compress_len"] = arguments[
+                "min_compress_len"
+            ]
         super(MemcacheArgs, self).__init__(arguments)

-pylibmc = None
-

 class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend for the

@@ -245,8 +270,8 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
     """

     def __init__(self, arguments):
-        self.binary = arguments.get('binary', False)
-        self.behaviors = arguments.get('behaviors', {})
+        self.binary = arguments.get("binary", False)
+        self.behaviors = arguments.get("behaviors", {})
         super(PylibmcBackend, self).__init__(arguments)

     def _imports(self):

@@ -255,13 +280,9 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):

     def _create_client(self):
         return pylibmc.Client(
-            self.url,
-            binary=self.binary,
-            behaviors=self.behaviors
+            self.url, binary=self.binary, behaviors=self.behaviors
         )

-memcache = None
-

 class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend using the standard

@@ -281,16 +302,39 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
         }
     )

+    :param dead_retry: Number of seconds memcached server is considered dead
+     before it is tried again. Will be passed to ``memcache.Client``
+     as the ``dead_retry`` parameter.
+
+     .. versionchanged:: 1.1.8  Moved the ``dead_retry`` argument which was
+        erroneously added to "set_parameters" to
+        be part of the Memcached connection arguments.
+
+    :param socket_timeout: Timeout in seconds for every call to a server.
+     Will be passed to ``memcache.Client`` as the ``socket_timeout``
+     parameter.
+
+     .. versionchanged:: 1.1.8  Moved the ``socket_timeout`` argument which
+        was erroneously added to "set_parameters"
+        to be part of the Memcached connection arguments.
+
     """

+    def __init__(self, arguments):
+        self.dead_retry = arguments.get("dead_retry", 30)
+        self.socket_timeout = arguments.get("socket_timeout", 3)
+        super(MemcachedBackend, self).__init__(arguments)
+
     def _imports(self):
         global memcache
         import memcache  # noqa

     def _create_client(self):
-        return memcache.Client(self.url)
-
-
-bmemcached = None
+        return memcache.Client(
+            self.url,
+            dead_retry=self.dead_retry,
+            socket_timeout=self.socket_timeout,
+        )


 class BMemcachedBackend(GenericMemcachedBackend):

@@ -299,9 +343,11 @@ class BMemcachedBackend(GenericMemcachedBackend):
     python-binary-memcached>`_
     memcached client.

-    This is a pure Python memcached client which
-    includes the ability to authenticate with a memcached
-    server using SASL.
+    This is a pure Python memcached client which includes
+    security features like SASL and SSL/TLS.
+
+    SASL is a standard for adding authentication mechanisms
+    to protocols in a way that is protocol independent.

     A typical configuration using username/password::

@@ -317,6 +363,25 @@ class BMemcachedBackend(GenericMemcachedBackend):
         }
     )

+    A typical configuration using tls_context::
+
+        import ssl
+        from dogpile.cache import make_region
+
+        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
+
+        region = make_region().configure(
+            'dogpile.cache.bmemcached',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'tls_context':ctx,
+            }
+        )
+
+    For advanced ways to configure TLS creating a more complex
+    tls_context visit https://docs.python.org/3/library/ssl.html
+
     Arguments which can be passed to the ``arguments``
     dictionary include:

@@ -324,11 +389,17 @@ class BMemcachedBackend(GenericMemcachedBackend):
     SASL authentication.
     :param password: optional password, will be used for
     SASL authentication.
+    :param tls_context: optional TLS context, will be used for
+     TLS connections.
+
+     .. versionadded:: 1.0.2
+
     """

     def __init__(self, arguments):
-        self.username = arguments.get('username', None)
-        self.password = arguments.get('password', None)
+        self.username = arguments.get("username", None)
+        self.password = arguments.get("password", None)
+        self.tls_context = arguments.get("tls_context", None)
         super(BMemcachedBackend, self).__init__(arguments)

     def _imports(self):

@@ -345,7 +416,8 @@ class BMemcachedBackend(GenericMemcachedBackend):
             def add(self, key, value, timeout=0):
                 try:
                     return super(RepairBMemcachedAPI, self).add(
-                        key, value, timeout)
+                        key, value, timeout
+                    )
                 except ValueError:
                     return False

@@ -355,10 +427,213 @@ class BMemcachedBackend(GenericMemcachedBackend):
         return self.Client(
             self.url,
             username=self.username,
-            password=self.password
+            password=self.password,
+            tls_context=self.tls_context,
         )

     def delete_multi(self, keys):
         """python-binary-memcached api does not implement delete_multi"""
         for key in keys:
             self.delete(key)
+
+
+class PyMemcacheBackend(GenericMemcachedBackend):
+    """A backend for the
+    `pymemcache <https://github.com/pinterest/pymemcache>`_
+    memcached client.
+
+    A comprehensive, fast, pure Python memcached client.
+
+    .. versionadded:: 1.1.2
+
+    pymemcache supports the following features:
+
+    * Complete implementation of the memcached text protocol.
+    * Configurable timeouts for socket connect and send/recv calls.
+    * Access to the "noreply" flag, which can significantly increase
+      the speed of writes.
+    * Flexible, simple approach to serialization and deserialization.
+    * The (optional) ability to treat network and memcached errors as
+      cache misses.
+
+    dogpile.cache uses the ``HashClient`` from pymemcache in order to reduce
+    API differences when compared to other memcached client drivers.
+    This allows the user to provide a single server or a list of memcached
+    servers.
+
+    Arguments which can be passed to the ``arguments``
+    dictionary include:
+
+    :param tls_context: optional TLS context, will be used for
+     TLS connections.
+
+     A typical configuration using tls_context::
+
+        import ssl
+        from dogpile.cache import make_region
+
+        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
+
+        region = make_region().configure(
+            'dogpile.cache.pymemcache',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'tls_context':ctx,
+            }
+        )
+
+     .. seealso::
+
+        `<https://docs.python.org/3/library/ssl.html>`_ - additional TLS
+        documentation.
+
+    :param serde: optional "serde". Defaults to
+     ``pymemcache.serde.pickle_serde``.
+
+    :param default_noreply: defaults to False.  When set to True this flag
+     enables the pymemcache "noreply" feature.  See the pymemcache
+     documentation for further details.
+
+    :param socket_keepalive: optional socket keepalive, will be used for
+     TCP keepalive configuration.  Use of this parameter requires pymemcache
+     3.5.0 or greater.  This parameter accepts a
+     `pymemcache.client.base.KeepAliveOpts
+     <https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html#pymemcache.client.base.KepaliveOpts>`_
+     object.
+
+     A typical configuration using ``socket_keepalive``::
+
+        from pymemcache import KeepaliveOpts
+        from dogpile.cache import make_region
+
+        # Using the default keepalive configuration
+        socket_keepalive = KeepaliveOpts()
+
+        region = make_region().configure(
+            'dogpile.cache.pymemcache',
+            expiration_time = 3600,
+            arguments = {
+                'url':["127.0.0.1"],
+                'socket_keepalive': socket_keepalive
+            }
+        )
+
+     .. versionadded:: 1.1.4 - added support for ``socket_keepalive``.
+
+    :param enable_retry_client: optional flag to enable retry client
+     mechanisms to handle failure.  Defaults to False.  When set to ``True``,
+     the :paramref:`.PyMemcacheBackend.retry_attempts` parameter must also
+     be set, along with optional parameters
+     :paramref:`.PyMemcacheBackend.retry_delay`,
+     :paramref:`.PyMemcacheBackend.retry_for`,
+     :paramref:`.PyMemcacheBackend.do_not_retry_for`.
+
+     .. seealso::
+
+        `<https://pymemcache.readthedocs.io/en/latest/getting_started.html#using-the-built-in-retrying-mechanism>`_ -
+        in the pymemcache documentation
+
+     .. versionadded:: 1.1.4
+
+    :param retry_attempts: how many times to attempt an action with
+     pymemcache's retrying wrapper before failing.  Must be 1 or above.
+     Defaults to None.
+
+     .. versionadded:: 1.1.4
+
+    :param retry_delay: optional int|float, how many seconds to sleep between
+     each attempt.  Used by the retry wrapper.  Defaults to None.
+
+     .. versionadded:: 1.1.4
+
+    :param retry_for: optional None|tuple|set|list, what exceptions to
+     allow retries for.  Will allow retries for all exceptions if None.
+     Example: ``(MemcacheClientError, MemcacheUnexpectedCloseError)``
+     Accepts any class that is a subclass of Exception.  Defaults to None.
+
+     .. versionadded:: 1.1.4
+
+    :param do_not_retry_for: optional None|tuple|set|list, what
+     exceptions should not be retried.  Will not block retries for any
+     Exception if None.  Example: ``(IOError, MemcacheIllegalInputError)``
+     Accepts any class that is a subclass of Exception.  Defaults to None.
+
+     .. versionadded:: 1.1.4
+
+    :param hashclient_retry_attempts: Amount of times a client should be tried
+     before it is marked dead and removed from the pool in the HashClient's
+     internal mechanisms.
+
+     .. versionadded:: 1.1.5
+
+    :param hashclient_retry_timeout: Time in seconds that should pass between
+     retry attempts in the HashClient's internal mechanisms.
+
+     .. versionadded:: 1.1.5
+
+    :param dead_timeout: Time in seconds before attempting to add a node
+     back in the pool in the HashClient's internal mechanisms.
+
+     .. versionadded:: 1.1.5
+
+    """  # noqa E501
+
+    def __init__(self, arguments):
+        super().__init__(arguments)
+
+        self.serde = arguments.get("serde", pymemcache.serde.pickle_serde)
+        self.default_noreply = arguments.get("default_noreply", False)
+        self.tls_context = arguments.get("tls_context", None)
+        self.socket_keepalive = arguments.get("socket_keepalive", None)
+        self.enable_retry_client = arguments.get("enable_retry_client", False)
+        self.retry_attempts = arguments.get("retry_attempts", None)
+        self.retry_delay = arguments.get("retry_delay", None)
+        self.retry_for = arguments.get("retry_for", None)
+        self.do_not_retry_for = arguments.get("do_not_retry_for", None)
+        self.hashclient_retry_attempts = arguments.get(
+            "hashclient_retry_attempts", 2
+        )
+        self.hashclient_retry_timeout = arguments.get(
+            "hashclient_retry_timeout", 1
+        )
+        self.dead_timeout = arguments.get("hashclient_dead_timeout", 60)
+        if (
+            self.retry_delay is not None
+            or self.retry_attempts is not None
+            or self.retry_for is not None
+            or self.do_not_retry_for is not None
+        ) and not self.enable_retry_client:
+            warnings.warn(
+                "enable_retry_client is not set; retry options "
+                "will be ignored"
+            )
+
+    def _imports(self):
+        global pymemcache
+        import pymemcache
+
+    def _create_client(self):
+        _kwargs = {
+            "serde": self.serde,
+            "default_noreply": self.default_noreply,
+            "tls_context": self.tls_context,
+            "retry_attempts": self.hashclient_retry_attempts,
+            "retry_timeout": self.hashclient_retry_timeout,
+            "dead_timeout": self.dead_timeout,
+        }
+        if self.socket_keepalive is not None:
+            _kwargs.update({"socket_keepalive": self.socket_keepalive})
+
+        client = pymemcache.client.hash.HashClient(self.url, **_kwargs)
+        if self.enable_retry_client:
+            return pymemcache.client.retrying.RetryingClient(
+                client,
+                attempts=self.retry_attempts,
+                retry_delay=self.retry_delay,
+                retry_for=self.retry_for,
+                do_not_retry_for=self.do_not_retry_for,
+            )
+
+        return client
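The pymemcache retry options above only take effect together with ``enable_retry_client`` (hence the new warning in ``__init__``); a hedged configuration sketch, with the server list illustrative::

    from pymemcache.exceptions import MemcacheUnexpectedCloseError

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.pymemcache",
        expiration_time=3600,
        arguments={
            "url": ["127.0.0.1"],
            "enable_retry_client": True,
            "retry_attempts": 3,
            "retry_delay": 0.5,
            "retry_for": (MemcacheUnexpectedCloseError,),
        },
    )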
libs/common/dogpile/cache/backends/memory.py (vendored, 31 lines changed)

@@ -10,8 +10,10 @@ places the value as given into the dictionary.

 """

-from ..api import CacheBackend, NO_VALUE
-from ...util.compat import pickle
+from ..api import CacheBackend
+from ..api import DefaultSerialization
+from ..api import NO_VALUE


 class MemoryBackend(CacheBackend):

@@ -47,39 +49,21 @@ class MemoryBackend(CacheBackend):


     """
-    pickle_values = False

     def __init__(self, arguments):
         self._cache = arguments.pop("cache_dict", {})

     def get(self, key):
-        value = self._cache.get(key, NO_VALUE)
-        if value is not NO_VALUE and self.pickle_values:
-            value = pickle.loads(value)
-        return value
+        return self._cache.get(key, NO_VALUE)

     def get_multi(self, keys):
-        ret = [
-            self._cache.get(key, NO_VALUE)
-            for key in keys]
-        if self.pickle_values:
-            ret = [
-                pickle.loads(value)
-                if value is not NO_VALUE else value
-                for value in ret
-            ]
-        return ret
+        return [self._cache.get(key, NO_VALUE) for key in keys]

     def set(self, key, value):
-        if self.pickle_values:
-            value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
         self._cache[key] = value

     def set_multi(self, mapping):
-        pickle_values = self.pickle_values
         for key, value in mapping.items():
-            if pickle_values:
-                value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
             self._cache[key] = value

     def delete(self, key):

@@ -90,7 +74,7 @@ class MemoryBackend(CacheBackend):
         self._cache.pop(key, None)


-class MemoryPickleBackend(MemoryBackend):
+class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
     """A backend that uses a plain dictionary, but serializes objects on
     :meth:`.MemoryBackend.set` and deserializes :meth:`.MemoryBackend.get`.


@@ -121,4 +105,3 @@ class MemoryPickleBackend(MemoryBackend):
     .. versionadded:: 0.5.3

     """
-    pickle_values = True
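The split between the two memory backends is now carried entirely by ``DefaultSerialization``; the practical difference is that the pickle variant stores a copy rather than a reference::

    from dogpile.cache import make_region

    plain = make_region().configure("dogpile.cache.memory")
    pickled = make_region().configure("dogpile.cache.memory_pickle")

    data = {"count": 1}
    plain.set("k", data)
    pickled.set("k", data)

    data["count"] = 99
    assert plain.get("k")["count"] == 99   # same object was cached
    assert pickled.get("k")["count"] == 1  # pickled copy is isolated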
libs/common/dogpile/cache/backends/null.py (vendored, 8 lines changed)

@@ -10,10 +10,11 @@ caching for a region that is otherwise used normally.

 """

-from ..api import CacheBackend, NO_VALUE
+from ..api import CacheBackend
+from ..api import NO_VALUE


-__all__ = ['NullBackend']
+__all__ = ["NullBackend"]


 class NullLock(object):

@@ -23,6 +24,9 @@ class NullLock(object):
     def release(self):
         pass

+    def locked(self):
+        return False
+

 class NullBackend(CacheBackend):
     """A "null" backend that effectively disables all cache operations.
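The added ``NullLock.locked()`` keeps the null backend aligned with the new ``CacheMutex`` interface; usage is unchanged::

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.null")

    @region.cache_on_arguments()
    def compute(x):
        # decorator machinery still runs; nothing is actually cached
        return x * 2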
302
libs/common/dogpile/cache/backends/redis.py
vendored
302
libs/common/dogpile/cache/backends/redis.py
vendored
|
@ -7,16 +7,24 @@ Provides backends for talking to `Redis <http://redis.io>`_.
|
|||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from ..api import CacheBackend, NO_VALUE
|
||||
from ...util.compat import pickle, u
|
||||
|
||||
redis = None
|
||||
import typing
|
||||
import warnings
|
||||
|
||||
__all__ = 'RedisBackend',
|
||||
from ..api import BytesBackend
|
||||
from ..api import NO_VALUE
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
import redis
|
||||
else:
|
||||
# delayed import
|
||||
redis = None # noqa F811
|
||||
|
||||
__all__ = ("RedisBackend", "RedisSentinelBackend")
|
||||
|
||||
|
||||
class RedisBackend(CacheBackend):
|
||||
"""A `Redis <http://redis.io/>`_ backend, using the
|
||||
class RedisBackend(BytesBackend):
|
||||
r"""A `Redis <http://redis.io/>`_ backend, using the
|
||||
`redis-py <http://pypi.python.org/pypi/redis/>`_ backend.
|
||||
|
||||
Example configuration::
|
||||
|
@ -30,23 +38,21 @@ class RedisBackend(CacheBackend):
|
|||
'port': 6379,
|
||||
'db': 0,
|
||||
'redis_expiration_time': 60*60*2, # 2 hours
|
||||
'distributed_lock': True
|
||||
'distributed_lock': True,
|
||||
'thread_local_lock': False
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
Arguments accepted in the arguments dictionary:
|
||||
|
||||
:param url: string. If provided, will override separate host/port/db
|
||||
params. The format is that accepted by ``StrictRedis.from_url()``.
|
||||
|
||||
.. versionadded:: 0.4.1
|
||||
|
||||
:param host: string, default is ``localhost``.
|
||||
|
||||
:param password: string, default is no password.
|
||||
|
||||
.. versionadded:: 0.4.1
|
||||
|
||||
:param port: integer, default is ``6379``.
|
||||
|
||||
:param db: integer, default is ``0``.
|
||||
|
@ -56,57 +62,66 @@ class RedisBackend(CacheBackend):
|
|||
cache expiration. By default no expiration is set.
|
||||
|
||||
:param distributed_lock: boolean, when True, will use a
|
||||
redis-lock as the dogpile lock.
|
||||
Use this when multiple
|
||||
processes will be talking to the same redis instance.
|
||||
When left at False, dogpile will coordinate on a regular
|
||||
threading mutex.
|
||||
redis-lock as the dogpile lock. Use this when multiple processes will be
|
||||
talking to the same redis instance. When left at False, dogpile will
|
||||
coordinate on a regular threading mutex.
|
||||
|
||||
:param lock_timeout: integer, number of seconds after acquiring a lock that
|
||||
Redis should expire it. This argument is only valid when
|
||||
``distributed_lock`` is ``True``.
|
||||
|
||||
.. versionadded:: 0.5.0
|
||||
|
||||
:param socket_timeout: float, seconds for socket timeout.
|
||||
Default is None (no timeout).
|
||||
|
||||
.. versionadded:: 0.5.4
|
||||
|
||||
:param lock_sleep: integer, number of seconds to sleep when failed to
|
||||
acquire a lock. This argument is only valid when
|
||||
``distributed_lock`` is ``True``.
|
||||
|
||||
.. versionadded:: 0.5.0
|
||||
|
||||
:param connection_pool: ``redis.ConnectionPool`` object. If provided,
|
||||
this object supersedes other connection arguments passed to the
|
||||
``redis.StrictRedis`` instance, including url and/or host as well as
|
||||
socket_timeout, and will be passed to ``redis.StrictRedis`` as the
|
||||
source of connectivity.
|
||||
|
||||
.. versionadded:: 0.5.4
|
||||
:param thread_local_lock: bool, whether a thread-local Redis lock object
|
||||
should be used. This is the default, but is not compatible with
|
||||
asynchronous runners, as they run in a different thread than the one
|
||||
used to create the lock.
|
||||
|
||||
:param connection_kwargs: dict, additional keyword arguments are passed
|
||||
along to the
|
||||
``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
|
||||
directly, including parameters like ``ssl``, ``ssl_certfile``,
|
||||
``charset``, etc.
|
||||
|
||||
.. versionadded:: 1.1.6 Added ``connection_kwargs`` parameter.
|
||||
|
||||
"""

def __init__(self, arguments):
arguments = arguments.copy()
self._imports()
self.url = arguments.pop('url', None)
self.host = arguments.pop('host', 'localhost')
self.password = arguments.pop('password', None)
self.port = arguments.pop('port', 6379)
self.db = arguments.pop('db', 0)
self.distributed_lock = arguments.get('distributed_lock', False)
self.socket_timeout = arguments.pop('socket_timeout', None)
self.url = arguments.pop("url", None)
self.host = arguments.pop("host", "localhost")
self.password = arguments.pop("password", None)
self.port = arguments.pop("port", 6379)
self.db = arguments.pop("db", 0)
self.distributed_lock = arguments.pop("distributed_lock", False)
self.socket_timeout = arguments.pop("socket_timeout", None)
self.lock_timeout = arguments.pop("lock_timeout", None)
self.lock_sleep = arguments.pop("lock_sleep", 0.1)
self.thread_local_lock = arguments.pop("thread_local_lock", True)
self.connection_kwargs = arguments.pop("connection_kwargs", {})

self.lock_timeout = arguments.get('lock_timeout', None)
self.lock_sleep = arguments.get('lock_sleep', 0.1)
if self.distributed_lock and self.thread_local_lock:
warnings.warn(
"The Redis backend thread_local_lock parameter should be "
"set to False when distributed_lock is True"
)

self.redis_expiration_time = arguments.pop('redis_expiration_time', 0)
self.connection_pool = arguments.get('connection_pool', None)
self.client = self._create_client()
self.redis_expiration_time = arguments.pop("redis_expiration_time", 0)
self.connection_pool = arguments.pop("connection_pool", None)
self._create_client()

def _imports(self):
# defer imports until backend is used

@@ -118,66 +133,207 @@ class RedisBackend(CacheBackend):
# the connection pool already has all other connection
# options present within, so here we disregard socket_timeout
# and others.
return redis.StrictRedis(connection_pool=self.connection_pool)

args = {}
if self.socket_timeout:
args['socket_timeout'] = self.socket_timeout

if self.url is not None:
args.update(url=self.url)
return redis.StrictRedis.from_url(**args)
else:
args.update(
host=self.host, password=self.password,
port=self.port, db=self.db
self.writer_client = redis.StrictRedis(
connection_pool=self.connection_pool
)
return redis.StrictRedis(**args)
self.reader_client = self.writer_client
else:
args = {}
args.update(self.connection_kwargs)
if self.socket_timeout:
args["socket_timeout"] = self.socket_timeout

if self.url is not None:
args.update(url=self.url)
self.writer_client = redis.StrictRedis.from_url(**args)
self.reader_client = self.writer_client
else:
args.update(
host=self.host,
password=self.password,
port=self.port,
db=self.db,
)
self.writer_client = redis.StrictRedis(**args)
self.reader_client = self.writer_client

def get_mutex(self, key):
if self.distributed_lock:
return self.client.lock(u('_lock{0}').format(key),
self.lock_timeout, self.lock_sleep)
return _RedisLockWrapper(
self.writer_client.lock(
"_lock{0}".format(key),
timeout=self.lock_timeout,
sleep=self.lock_sleep,
thread_local=self.thread_local_lock,
)
)
else:
return None

def get(self, key):
value = self.client.get(key)
def get_serialized(self, key):
value = self.reader_client.get(key)
if value is None:
return NO_VALUE
return pickle.loads(value)
return value

def get_multi(self, keys):
def get_serialized_multi(self, keys):
if not keys:
return []
values = self.client.mget(keys)
return [
pickle.loads(v) if v is not None else NO_VALUE
for v in values]
values = self.reader_client.mget(keys)
return [v if v is not None else NO_VALUE for v in values]

def set(self, key, value):
def set_serialized(self, key, value):
if self.redis_expiration_time:
self.client.setex(key, self.redis_expiration_time,
pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
self.writer_client.setex(key, self.redis_expiration_time, value)
else:
self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

def set_multi(self, mapping):
mapping = dict(
(k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL))
for k, v in mapping.items()
)
self.writer_client.set(key, value)

def set_serialized_multi(self, mapping):
if not self.redis_expiration_time:
self.client.mset(mapping)
self.writer_client.mset(mapping)
else:
pipe = self.client.pipeline()
pipe = self.writer_client.pipeline()
for key, value in mapping.items():
pipe.setex(key, self.redis_expiration_time, value)
pipe.execute()

def delete(self, key):
self.client.delete(key)
self.writer_client.delete(key)

def delete_multi(self, keys):
self.client.delete(*keys)
self.writer_client.delete(*keys)
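
Note that pickling has moved out of the backend: under the serialized-backend
protocol introduced in dogpile.cache 1.1, the region applies a serializer and
deserializer (pickle by default) around ``set_serialized()`` and
``get_serialized()``. A sketch of swapping in JSON instead, assuming the 1.1
``make_region()`` signature::

    import json

    from dogpile.cache import make_region

    region = make_region(
        # assumption: serializer/deserializer are accepted here as of 1.1
        serializer=lambda value: json.dumps(value).encode("utf-8"),
        deserializer=lambda data: json.loads(data.decode("utf-8")),
    ).configure(
        "dogpile.cache.redis",
        arguments={"url": "redis://localhost:6379/0"},
    )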


class _RedisLockWrapper:
__slots__ = ("mutex", "__weakref__")

def __init__(self, mutex: typing.Any):
self.mutex = mutex

def acquire(self, wait: bool = True) -> typing.Any:
return self.mutex.acquire(blocking=wait)

def release(self) -> typing.Any:
return self.mutex.release()

def locked(self) -> bool:
return self.mutex.locked() # type: ignore
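
``_RedisLockWrapper`` adapts redis-py's ``Lock.acquire(blocking=...)``
signature to the ``CacheMutex.acquire(wait=...)`` interface. A minimal usage
sketch, assuming ``backend`` is a ``RedisBackend`` configured with
``distributed_lock=True`` (the key name is illustrative)::

    mutex = backend.get_mutex("some_key")  # returns a _RedisLockWrapper
    if mutex.acquire(wait=True):
        try:
            pass  # regenerate the cached value under the distributed lock
        finally:
            mutex.release()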


class RedisSentinelBackend(RedisBackend):
"""A `Redis <http://redis.io/>`_ backend, using the
`redis-py <http://pypi.python.org/pypi/redis/>`_ driver.
It will use the Sentinel of a Redis cluster.

.. versionadded:: 1.0.0

Example configuration::

from dogpile.cache import make_region

region = make_region().configure(
'dogpile.cache.redis_sentinel',
arguments = {
'sentinels': [
['redis_sentinel_1', 26379],
['redis_sentinel_2', 26379]
],
'db': 0,
'redis_expiration_time': 60*60*2, # 2 hours
'distributed_lock': True,
'thread_local_lock': False
}
)

Arguments accepted in the arguments dictionary:

:param db: integer, default is ``0``.

:param redis_expiration_time: integer, number of seconds after setting
a value that Redis should expire it. This should be larger than dogpile's
cache expiration. By default no expiration is set.

:param distributed_lock: boolean, when True, will use a
redis-lock as the dogpile lock. Use this when multiple processes will be
talking to the same redis instance. When False, dogpile will
coordinate on a regular threading mutex. Default is True.

:param lock_timeout: integer, number of seconds after acquiring a lock that
Redis should expire it. This argument is only valid when
``distributed_lock`` is ``True``.

:param socket_timeout: float, seconds for socket timeout.
Default is None (no timeout).

:param sentinels: a list of sentinel nodes. Each node is represented by
a pair (hostname, port).
Default is None (not in sentinel mode).

:param service_name: str, the service name.
Default is 'mymaster'.

:param sentinel_kwargs: a dictionary of connection arguments used when
connecting to sentinel instances. Any argument that can be passed to
a normal Redis connection can be specified here.
Default is {}.

:param connection_kwargs: dict, additional keyword arguments are passed
along to the ``StrictRedis.from_url()`` method or ``StrictRedis()``
constructor directly, including parameters like ``ssl``, ``ssl_certfile``,
``charset``, etc.

:param lock_sleep: integer, number of seconds to sleep when failing to
acquire a lock. This argument is only valid when
``distributed_lock`` is ``True``.

:param thread_local_lock: bool, whether a thread-local Redis lock object
should be used. This is the default, but is not compatible with
asynchronous runners, as they run in a different thread than the one
used to create the lock.
"""

def __init__(self, arguments):
arguments = arguments.copy()

self.sentinels = arguments.pop("sentinels", None)
self.service_name = arguments.pop("service_name", "mymaster")
self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {})

super().__init__(
arguments={
"distributed_lock": True,
"thread_local_lock": False,
**arguments,
}
)

def _imports(self):
# defer imports until backend is used
global redis
import redis.sentinel  # noqa

def _create_client(self):
sentinel_kwargs = {}
sentinel_kwargs.update(self.sentinel_kwargs)
sentinel_kwargs.setdefault("password", self.password)

connection_kwargs = {}
connection_kwargs.update(self.connection_kwargs)
connection_kwargs.setdefault("password", self.password)

if self.db is not None:
connection_kwargs.setdefault("db", self.db)
sentinel_kwargs.setdefault("db", self.db)
if self.socket_timeout is not None:
connection_kwargs.setdefault("socket_timeout", self.socket_timeout)

sentinel = redis.sentinel.Sentinel(
self.sentinels,
sentinel_kwargs=sentinel_kwargs,
**connection_kwargs,
)
self.writer_client = sentinel.master_for(self.service_name)
self.reader_client = sentinel.slave_for(self.service_name)
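
For orientation, the client setup above amounts to roughly the following
direct redis-py calls (host names and the service name are the placeholder
values from the docstring example)::

    import redis.sentinel

    # the Sentinel nodes are queried to locate the current master/replicas
    sentinel = redis.sentinel.Sentinel(
        [("redis_sentinel_1", 26379), ("redis_sentinel_2", 26379)],
        db=0,
    )
    writer = sentinel.master_for("mymaster")  # writes always hit the master
    reader = sentinel.slave_for("mymaster")   # reads may hit a replica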

14
libs/common/dogpile/cache/plugins/mako_cache.py
vendored

@@ -51,20 +51,22 @@ class MakoPlugin(CacheImpl):
def __init__(self, cache):
super(MakoPlugin, self).__init__(cache)
try:
self.regions = self.cache.template.cache_args['regions']
self.regions = self.cache.template.cache_args["regions"]
except KeyError:
raise KeyError(
"'cache_regions' argument is required on the "
"Mako Lookup or Template object for usage "
"with the dogpile.cache plugin.")
"with the dogpile.cache plugin."
)

def _get_region(self, **kw):
try:
region = kw['region']
region = kw["region"]
except KeyError:
raise KeyError(
"'cache_region' argument must be specified with 'cache=True'"
"within templates for usage with the dogpile.cache plugin.")
"within templates for usage with the dogpile.cache plugin."
)
try:
return self.regions[region]
except KeyError:

@@ -73,8 +75,8 @@ class MakoPlugin(CacheImpl):
def get_and_replace(self, key, creation_function, **kw):
expiration_time = kw.pop("timeout", None)
return self._get_region(**kw).get_or_create(
key, creation_function,
expiration_time=expiration_time)
key, creation_function, expiration_time=expiration_time
)

def get_or_create(self, key, creation_function, **kw):
return self.get_and_replace(key, creation_function, **kw)
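
To use this plugin from Mako, the lookup is pointed at dogpile.cache and given
a dictionary of regions; a sketch, assuming a memory-backed region and an
illustrative template directory::

    from dogpile.cache import make_region
    from mako.lookup import TemplateLookup

    regions = {"local": make_region().configure("dogpile.cache.memory")}

    lookup = TemplateLookup(
        directories=["/path/to/templates"],
        cache_impl="dogpile.cache",
        cache_args={"regions": regions},
    )

A block inside a template is then cached with, e.g.,
``<%def name="mysection()" cached="True" cache_region="local">``.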

53
libs/common/dogpile/cache/proxy.py
vendored

@@ -10,7 +10,16 @@ base backend.

"""

from typing import Mapping
from typing import Optional
from typing import Sequence

from .api import BackendFormatted
from .api import BackendSetType
from .api import CacheBackend
from .api import CacheMutex
from .api import KeyType
from .api import SerializedReturnType


class ProxyBackend(CacheBackend):

@@ -55,17 +64,17 @@ class ProxyBackend(CacheBackend):

"""

def __init__(self, *args, **kwargs):
self.proxied = None
def __init__(self, *arg, **kw):
pass

def wrap(self, backend):
''' Take a backend as an argument and setup the self.proxied property.
def wrap(self, backend: CacheBackend) -> "ProxyBackend":
"""Take a backend as an argument and setup the self.proxied property.
Return an object that can be used as a backend by a :class:`.CacheRegion`
object.
'''
assert(
isinstance(backend, CacheBackend) or
isinstance(backend, ProxyBackend))
"""
assert isinstance(backend, CacheBackend) or isinstance(
backend, ProxyBackend
)
self.proxied = backend
return self

@@ -73,23 +82,37 @@ class ProxyBackend(CacheBackend):
# Delegate any functions that are not already overridden to
# the proxied backend
#
def get(self, key):
def get(self, key: KeyType) -> BackendFormatted:
return self.proxied.get(key)

def set(self, key, value):
def set(self, key: KeyType, value: BackendSetType) -> None:
self.proxied.set(key, value)

def delete(self, key):
def delete(self, key: KeyType) -> None:
self.proxied.delete(key)

def get_multi(self, keys):
def get_multi(self, keys: Sequence[KeyType]) -> Sequence[BackendFormatted]:
return self.proxied.get_multi(keys)

def set_multi(self, mapping):
def set_multi(self, mapping: Mapping[KeyType, BackendSetType]) -> None:
self.proxied.set_multi(mapping)

def delete_multi(self, keys):
def delete_multi(self, keys: Sequence[KeyType]) -> None:
self.proxied.delete_multi(keys)

def get_mutex(self, key):
def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
return self.proxied.get_mutex(key)

def get_serialized(self, key: KeyType) -> SerializedReturnType:
return self.proxied.get_serialized(key)

def get_serialized_multi(
self, keys: Sequence[KeyType]
) -> Sequence[SerializedReturnType]:
return self.proxied.get_serialized_multi(keys)

def set_serialized(self, key: KeyType, value: bytes) -> None:
self.proxied.set_serialized(key, value)

def set_serialized_multi(self, mapping: Mapping[KeyType, bytes]) -> None:
self.proxied.set_serialized_multi(mapping)
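
A typical use is a small subclass passed to the ``wrap`` argument when
configuring a region; a sketch (the memory backend and print-based logging
are illustrative)::

    from dogpile.cache import make_region
    from dogpile.cache.proxy import ProxyBackend

    class LoggingProxy(ProxyBackend):
        def set(self, key, value):
            print("setting %s" % key)
            self.proxied.set(key, value)

    region = make_region().configure(
        "dogpile.cache.memory",
        wrap=[LoggingProxy],
    )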

822
libs/common/dogpile/cache/region.py
vendored
File diff suppressed because it is too large
88
libs/common/dogpile/cache/util.py
vendored

@@ -1,9 +1,10 @@
from hashlib import sha1

from ..util import compat
from ..util import langhelpers


def function_key_generator(namespace, fn, to_str=compat.string_type):
def function_key_generator(namespace, fn, to_str=str):
"""Return a function that generates a string
key, based on a given function as well as
arguments to the returned function itself.

@@ -23,47 +24,51 @@ def function_key_generator(namespace, fn, to_str=compat.string_type):
"""

if namespace is None:
namespace = '%s:%s' % (fn.__module__, fn.__name__)
namespace = "%s:%s" % (fn.__module__, fn.__name__)
else:
namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)

args = compat.inspect_getargspec(fn)
has_self = args[0] and args[0][0] in ('self', 'cls')
has_self = args[0] and args[0][0] in ("self", "cls")

def generate_key(*args, **kw):
if kw:
raise ValueError(
"dogpile.cache's default key creation "
"function does not accept keyword arguments.")
"function does not accept keyword arguments."
)
if has_self:
args = args[1:]

return namespace + "|" + " ".join(map(to_str, args))
return generate_key
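
A sketch of the resulting key format (module and function names here are
hypothetical)::

    def get_user(user_id, detail):
        ...

    gen = function_key_generator(None, get_user)
    gen(12, "full")  # -> "mymodule:get_user|12 full"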


def function_multi_key_generator(namespace, fn, to_str=compat.string_type):
def function_multi_key_generator(namespace, fn, to_str=str):

if namespace is None:
namespace = '%s:%s' % (fn.__module__, fn.__name__)
namespace = "%s:%s" % (fn.__module__, fn.__name__)
else:
namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)

args = compat.inspect_getargspec(fn)
has_self = args[0] and args[0][0] in ('self', 'cls')
has_self = args[0] and args[0][0] in ("self", "cls")

def generate_keys(*args, **kw):
if kw:
raise ValueError(
"dogpile.cache's default key creation "
"function does not accept keyword arguments.")
"function does not accept keyword arguments."
)
if has_self:
args = args[1:]
return [namespace + "|" + key for key in map(to_str, args)]
return generate_keys
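
This variant emits one key per positional argument and backs
``cache_multi_on_arguments``; a sketch (names hypothetical)::

    def load_users(*user_ids):
        ...

    gen = function_multi_key_generator(None, load_users)
    gen(1, 2, 3)
    # -> ["mymodule:load_users|1", "mymodule:load_users|2",
    #     "mymodule:load_users|3"]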


def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
def kwarg_function_key_generator(namespace, fn, to_str=str):
"""Return a function that generates a string
key, based on a given function as well as
arguments to the returned function itself.

@@ -83,9 +88,9 @@ def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
"""

if namespace is None:
namespace = '%s:%s' % (fn.__module__, fn.__name__)
namespace = "%s:%s" % (fn.__module__, fn.__name__)
else:
namespace = '%s:%s|%s' % (fn.__module__, fn.__name__, namespace)
namespace = "%s:%s|%s" % (fn.__module__, fn.__name__, namespace)

argspec = compat.inspect_getargspec(fn)
default_list = list(argspec.defaults or [])

@@ -94,32 +99,41 @@ def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
# enumerate()
default_list.reverse()
# use idx*-1 to create the correct right-lookup index.
args_with_defaults = dict((argspec.args[(idx*-1)], default)
for idx, default in enumerate(default_list, 1))
if argspec.args and argspec.args[0] in ('self', 'cls'):
args_with_defaults = dict(
(argspec.args[(idx * -1)], default)
for idx, default in enumerate(default_list, 1)
)
if argspec.args and argspec.args[0] in ("self", "cls"):
arg_index_start = 1
else:
arg_index_start = 0

def generate_key(*args, **kwargs):
as_kwargs = dict(
[(argspec.args[idx], arg)
for idx, arg in enumerate(args[arg_index_start:],
arg_index_start)])
[
(argspec.args[idx], arg)
for idx, arg in enumerate(
args[arg_index_start:], arg_index_start
)
]
)
as_kwargs.update(kwargs)
for arg, val in args_with_defaults.items():
if arg not in as_kwargs:
as_kwargs[arg] = val

argument_values = [as_kwargs[key]
for key in sorted(as_kwargs.keys())]
return namespace + '|' + " ".join(map(to_str, argument_values))
argument_values = [as_kwargs[key] for key in sorted(as_kwargs.keys())]
return namespace + "|" + " ".join(map(to_str, argument_values))
return generate_key
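
Unlike ``function_key_generator``, this variant accepts keyword arguments and
fills in declared defaults, so the same call spelled different ways yields the
same key; a sketch (names hypothetical)::

    def get_user(user_id, detail="full"):
        ...

    gen = kwarg_function_key_generator(None, get_user)
    gen(12)                         # -> "mymodule:get_user|full 12"
    gen(user_id=12, detail="full")  # same key

Note that values are ordered by sorted argument name ("detail" before
"user_id"), not by call order.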


def sha1_mangle_key(key):
"""a SHA1 key mangler."""

if isinstance(key, str):
key = key.encode("utf-8")
return sha1(key).hexdigest()
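
A key mangler is applied to every key before it reaches the backend; a sketch
wiring this one into a region (the memory backend is illustrative)::

    from dogpile.cache import make_region
    from dogpile.cache.util import sha1_mangle_key

    region = make_region(key_mangler=sha1_mangle_key).configure(
        "dogpile.cache.memory",
    )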

@@ -128,13 +142,16 @@ def length_conditional_mangler(length, mangler):
past a certain threshold.

"""

def mangle(key):
if len(key) >= length:
return mangler(key)
else:
return key
return mangle
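
Composing the two manglers keeps short keys readable while hashing anything
that would exceed a backend limit, such as memcached's 250-character key
maximum; a sketch::

    mangler = length_conditional_mangler(250, sha1_mangle_key)

    mangler("user:12")  # short keys pass through unchanged
    mangler("x" * 300)  # long keys become a 40-character SHA1 hex digest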

# in the 0.6 release these functions were moved to the dogpile.util namespace.
# They are linked here to maintain compatibility with older versions.

@@ -143,3 +160,30 @@ KeyReentrantMutex = langhelpers.KeyReentrantMutex
memoized_property = langhelpers.memoized_property
PluginLoader = langhelpers.PluginLoader
to_list = langhelpers.to_list


class repr_obj:

__slots__ = ("value", "max_chars")

def __init__(self, value, max_chars=300):
self.value = value
self.max_chars = max_chars

def __eq__(self, other):
return other.value == self.value

def __repr__(self):
rep = repr(self.value)
lenrep = len(rep)
if lenrep > self.max_chars:
segment_length = self.max_chars // 2
rep = (
rep[0:segment_length]
+ (
" ... (%d characters truncated) ... "
% (lenrep - self.max_chars)
)
+ rep[-segment_length:]
)
return rep
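
A quick illustration of the truncation (the value is arbitrary)::

    print(repr_obj("x" * 1000, max_chars=40))
    # prints the first and last 20 characters of the repr around a
    # "... (N characters truncated) ..." notice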