mirror of https://github.com/clinton-hall/nzbToMedia.git (synced 2025-08-21 13:53:15 -07:00)

Updated stevedore to 2.0.1

parent f1624a586f
commit fb6011f88d

52 changed files with 581 additions and 1960 deletions
448  libs/common/dogpile/cache/api.py (vendored)
@@ -1,19 +1,13 @@
-import abc
-import pickle
-from typing import Any
-from typing import Callable
-from typing import cast
-from typing import Mapping
-from typing import NamedTuple
-from typing import Optional
-from typing import Sequence
-from typing import Union
+import operator
+
+from ..util.compat import py3k


-class NoValue:
+class NoValue(object):
     """Describe a missing cache value.

-    The :data:`.NO_VALUE` constant should be used.
+    The :attr:`.NO_VALUE` module global
+    should be used.

     """
@@ -28,123 +22,51 @@ class NoValue:
         """
         return "<dogpile.cache.api.NoValue object>"

-    def __bool__(self):  # pragma NO COVERAGE
-        return False
+    if py3k:
+
+        def __bool__(self):  # pragma NO COVERAGE
+            return False
+
+    else:
+
+        def __nonzero__(self):  # pragma NO COVERAGE
+            return False


 NO_VALUE = NoValue()
 """Value returned from ``get()`` that describes
 a key not present."""

-MetaDataType = Mapping[str, Any]
-
-
-KeyType = str
-"""A cache key."""
-
-ValuePayload = Any
-"""An object to be placed in the cache against a key."""
-
-
-KeyManglerType = Callable[[KeyType], KeyType]
-Serializer = Callable[[ValuePayload], bytes]
-Deserializer = Callable[[bytes], ValuePayload]
-
-
-class CacheMutex(abc.ABC):
-    """Describes a mutexing object with acquire and release methods.
-
-    This is an abstract base class; any object that has acquire/release
-    methods may be used.
-
-    .. versionadded:: 1.1
-
-    .. seealso::
-
-        :meth:`.CacheBackend.get_mutex` - the backend method that optionally
-        returns this locking object.
-
-    """
-
-    @abc.abstractmethod
-    def acquire(self, wait: bool = True) -> bool:
-        """Acquire the mutex.
-
-        :param wait: if True, block until available, else return True/False
-         immediately.
-
-        :return: True if the lock succeeded.
-
-        """
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def release(self) -> None:
-        """Release the mutex."""
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def locked(self) -> bool:
-        """Check if the mutex was acquired.
-
-        :return: true if the lock is acquired.
-
-        .. versionadded:: 1.1.2
-
-        """
-        raise NotImplementedError()
-
-    @classmethod
-    def __subclasshook__(cls, C):
-        return hasattr(C, "acquire") and hasattr(C, "release")
-
-
-class CachedValue(NamedTuple):
+class CachedValue(tuple):
     """Represent a value stored in the cache.

     :class:`.CachedValue` is a two-tuple of
     ``(payload, metadata)``, where ``metadata``
     is dogpile.cache's tracking information (
-    currently the creation time).
+    currently the creation time).  The metadata
+    and tuple structure is pickleable, if
+    the backend requires serialization.

     """

-    payload: ValuePayload
+    payload = property(operator.itemgetter(0))
     """Named accessor for the payload."""

-    metadata: MetaDataType
+    metadata = property(operator.itemgetter(1))
     """Named accessor for the dogpile.cache metadata dictionary."""

+    def __new__(cls, payload, metadata):
+        return tuple.__new__(cls, (payload, metadata))
+
+    def __reduce__(self):
+        return CachedValue, (self.payload, self.metadata)
+

-CacheReturnType = Union[CachedValue, NoValue]
-"""The non-serialized form of what may be returned from a backend
-get method.
-
-"""
-
-SerializedReturnType = Union[bytes, NoValue]
-"""the serialized form of what may be returned from a backend get method."""
-
-BackendFormatted = Union[CacheReturnType, SerializedReturnType]
-"""Describes the type returned from the :meth:`.CacheBackend.get` method."""
-
-BackendSetType = Union[CachedValue, bytes]
-"""Describes the value argument passed to the :meth:`.CacheBackend.set`
-method."""
-
-BackendArguments = Mapping[str, Any]
-
-
-class CacheBackend:
-    """Base class for backend implementations.
-
-    Backends which set and get Python object values should subclass this
-    backend.  For backends in which the value that's stored is ultimately
-    a stream of bytes, the :class:`.BytesBackend` should be used.
-
-    """
+class CacheBackend(object):
+    """Base class for backend implementations."""

-    key_mangler: Optional[Callable[[KeyType], KeyType]] = None
+    key_mangler = None
     """Key mangling function.

     May be None, or otherwise declared
@@ -152,23 +74,7 @@ class CacheBackend:

     """

-    serializer: Union[None, Serializer] = None
-    """Serializer function that will be used by default if not overridden
-    by the region.
-
-    .. versionadded:: 1.1
-
-    """
-
-    deserializer: Union[None, Deserializer] = None
-    """deserializer function that will be used by default if not overridden
-    by the region.
-
-    .. versionadded:: 1.1
-
-    """
-
-    def __init__(self, arguments: BackendArguments):  # pragma NO COVERAGE
+    def __init__(self, arguments):  # pragma NO COVERAGE
         """Construct a new :class:`.CacheBackend`.

         Subclasses should override this to
@@ -191,10 +97,10 @@ class CacheBackend:
             )
         )

-    def has_lock_timeout(self) -> bool:
+    def has_lock_timeout(self):
         return False

-    def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
+    def get_mutex(self, key):
         """Return an optional mutexing object for the given key.

         This object need only provide an ``acquire()``
@@ -227,141 +133,48 @@ class CacheBackend:
         """
         return None

-    def get(self, key: KeyType) -> BackendFormatted:  # pragma NO COVERAGE
-        """Retrieve an optionally serialized value from the cache.
+    def get(self, key):  # pragma NO COVERAGE
+        """Retrieve a value from the cache.

-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :return: the Python object that corresponds to
-         what was established via the :meth:`.CacheBackend.set` method,
-         or the :data:`.NO_VALUE` constant if not present.
-
-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.get_serialized` method is not overridden.
+        The returned value should be an instance of
+        :class:`.CachedValue`, or ``NO_VALUE`` if
+        not present.

         """
         raise NotImplementedError()

-    def get_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[BackendFormatted]:  # pragma NO COVERAGE
-        """Retrieve multiple optionally serialized values from the cache.
+    def get_multi(self, keys):  # pragma NO COVERAGE
+        """Retrieve multiple values from the cache.

-        :param keys: sequence of string keys that was passed to the
-         :meth:`.CacheRegion.get_multi` method, which will also be processed
-         by the "key mangling" function if one was present.
-
-        :return a list of values as would be returned
-         individually via the :meth:`.CacheBackend.get` method, corresponding
-         to the list of keys given.
-
-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.get_serialized_multi` method is not overridden.
+        The returned value should be a list, corresponding
+        to the list of keys given.

         .. versionadded:: 0.5.0

         """
         raise NotImplementedError()

-    def get_serialized(self, key: KeyType) -> SerializedReturnType:
-        """Retrieve a serialized value from the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :return: a bytes object, or :data:`.NO_VALUE`
-         constant if not present.
-
-        The default implementation of this method for :class:`.CacheBackend`
-        returns the value of the :meth:`.CacheBackend.get` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        return cast(SerializedReturnType, self.get(key))
-
-    def get_serialized_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
-        """Retrieve multiple serialized values from the cache.
-
-        :param keys: sequence of string keys that was passed to the
-         :meth:`.CacheRegion.get_multi` method, which will also be processed
-         by the "key mangling" function if one was present.
-
-        :return: list of bytes objects
-
-        The default implementation of this method for :class:`.CacheBackend`
-        returns the value of the :meth:`.CacheBackend.get_multi` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        return cast(Sequence[SerializedReturnType], self.get_multi(keys))
-
-    def set(
-        self, key: KeyType, value: BackendSetType
-    ) -> None:  # pragma NO COVERAGE
-        """Set an optionally serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :param value: The optionally serialized :class:`.CachedValue` object.
-         May be an instance of :class:`.CachedValue` or a bytes object
-         depending on if a serializer is in use with the region and if the
-         :meth:`.CacheBackend.set_serialized` method is not overridden.
-
-        .. seealso::
-
-            :meth:`.CacheBackend.set_serialized`
+    def set(self, key, value):  # pragma NO COVERAGE
+        """Set a value in the cache.
+
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
+        The value will always be an instance
+        of :class:`.CachedValue`.

         """
         raise NotImplementedError()

-    def set_serialized(
-        self, key: KeyType, value: bytes
-    ) -> None:  # pragma NO COVERAGE
-        """Set a serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :param value: a bytes object to be stored.
-
-        The default implementation of this method for :class:`.CacheBackend`
-        calls upon the :meth:`.CacheBackend.set` method.
-
-        .. versionadded:: 1.1
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-        """
-        self.set(key, value)
-
-    def set_multi(
-        self, mapping: Mapping[KeyType, BackendSetType]
-    ) -> None:  # pragma NO COVERAGE
+    def set_multi(self, mapping):  # pragma NO COVERAGE
         """Set multiple values in the cache.

-        :param mapping: a dict in which the key will be whatever was passed to
-         the :meth:`.CacheRegion.set_multi` method, processed by the "key
-         mangling" function, if any.
+        ``mapping`` is a dict in which
+        the key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.
+        The value will always be an instance
+        of :class:`.CachedValue`.

         When implementing a new :class:`.CacheBackend` or cutomizing via
         :class:`.ProxyBackend`, be aware that when this method is invoked by
@@ -371,52 +184,17 @@ class CacheBackend:
         -- that will have the undesirable effect of modifying the returned
         values as well.

-        If a serializer is in use, this method will only be called if the
-        :meth:`.CacheBackend.set_serialized_multi` method is not overridden.
-

        .. versionadded:: 0.5.0

         """
         raise NotImplementedError()

-    def set_serialized_multi(
-        self, mapping: Mapping[KeyType, bytes]
-    ) -> None:  # pragma NO COVERAGE
-        """Set multiple serialized values in the cache.
-
-        :param mapping: a dict in which the key will be whatever was passed to
-         the :meth:`.CacheRegion.set_multi` method, processed by the "key
-         mangling" function, if any.
-
-        When implementing a new :class:`.CacheBackend` or cutomizing via
-        :class:`.ProxyBackend`, be aware that when this method is invoked by
-        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
-        same ones returned to the upstream caller. If the subclass alters the
-        values in any way, it must not do so 'in-place' on the ``mapping`` dict
-        -- that will have the undesirable effect of modifying the returned
-        values as well.
-
-        .. versionadded:: 1.1
-
-        The default implementation of this method for :class:`.CacheBackend`
-        calls upon the :meth:`.CacheBackend.set_multi` method.
-
-        .. seealso::
-
-            :class:`.BytesBackend`
-
-
-        """
-        self.set_multi(mapping)
-
-    def delete(self, key: KeyType) -> None:  # pragma NO COVERAGE
+    def delete(self, key):  # pragma NO COVERAGE
         """Delete a value from the cache.

-        :param key: String key that was passed to the
-         :meth:`.CacheRegion.delete`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.

         The behavior here should be idempotent,
         that is, can be called any number of times
@@ -425,14 +203,12 @@ class CacheBackend:
         """
         raise NotImplementedError()

-    def delete_multi(
-        self, keys: Sequence[KeyType]
-    ) -> None:  # pragma NO COVERAGE
+    def delete_multi(self, keys):  # pragma NO COVERAGE
         """Delete multiple values from the cache.

-        :param keys: sequence of string keys that was passed to the
-         :meth:`.CacheRegion.delete_multi` method, which will also be processed
-         by the "key mangling" function if one was present.
+        The key will be whatever was passed
+        to the registry, processed by the
+        "key mangling" function, if any.

         The behavior here should be idempotent,
         that is, can be called any number of times
@@ -443,95 +219,3 @@ class CacheBackend:

         """
         raise NotImplementedError()
-
-
-class DefaultSerialization:
-    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
-        pickle.dumps
-    )
-    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
-        pickle.loads
-    )
-
-
-class BytesBackend(DefaultSerialization, CacheBackend):
-    """A cache backend that receives and returns series of bytes.
-
-    This backend only supports the "serialized" form of values; subclasses
-    should implement :meth:`.BytesBackend.get_serialized`,
-    :meth:`.BytesBackend.get_serialized_multi`,
-    :meth:`.BytesBackend.set_serialized`,
-    :meth:`.BytesBackend.set_serialized_multi`.
-
-    .. versionadded:: 1.1
-
-    """
-
-    def get_serialized(self, key: KeyType) -> SerializedReturnType:
-        """Retrieve a serialized value from the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.get`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :return: a bytes object, or :data:`.NO_VALUE`
-         constant if not present.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def get_serialized_multi(
-        self, keys: Sequence[KeyType]
-    ) -> Sequence[SerializedReturnType]:  # pragma NO COVERAGE
-        """Retrieve multiple serialized values from the cache.
-
-        :param keys: sequence of string keys that was passed to the
-         :meth:`.CacheRegion.get_multi` method, which will also be processed
-         by the "key mangling" function if one was present.
-
-        :return: list of bytes objects
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def set_serialized(
-        self, key: KeyType, value: bytes
-    ) -> None:  # pragma NO COVERAGE
-        """Set a serialized value in the cache.
-
-        :param key: String key that was passed to the :meth:`.CacheRegion.set`
-         method, which will also be processed by the "key mangling" function
-         if one was present.
-
-        :param value: a bytes object to be stored.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
-
-    def set_serialized_multi(
-        self, mapping: Mapping[KeyType, bytes]
-    ) -> None:  # pragma NO COVERAGE
-        """Set multiple serialized values in the cache.
-
-        :param mapping: a dict in which the key will be whatever was passed to
-         the :meth:`.CacheRegion.set_multi` method, processed by the "key
-         mangling" function, if any.
-
-        When implementing a new :class:`.CacheBackend` or cutomizing via
-        :class:`.ProxyBackend`, be aware that when this method is invoked by
-        :meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
-        same ones returned to the upstream caller. If the subclass alters the
-        values in any way, it must not do so 'in-place' on the ``mapping`` dict
-        -- that will have the undesirable effect of modifying the returned
-        values as well.
-
-        .. versionadded:: 1.1
-
-        """
-        raise NotImplementedError()
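
The hunks above remove the 1.1-series serialization layer (the ``Serializer``/``Deserializer`` aliases, :class:`.CacheMutex`, :class:`.BytesBackend`) and restore the older contract in which ``get()`` and ``set()`` exchange :class:`.CachedValue` objects directly, with no serializer in between. A minimal sketch of a backend written against this restored API — the class name and dict store here are illustrative, not part of the diff::

    # Illustrative only: a minimal backend for the restored (pre-1.1) API.
    from dogpile.cache.api import CacheBackend, NO_VALUE


    class DictBackend(CacheBackend):
        def __init__(self, arguments):
            self._store = {}

        def get(self, key):
            # returns the CachedValue exactly as given to set(), or NO_VALUE
            return self._store.get(key, NO_VALUE)

        def set(self, key, value):
            # value is a CachedValue two-tuple of (payload, metadata)
            self._store[key] = value

        def delete(self, key):
            self._store.pop(key, None)
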
10  libs/common/dogpile/cache/backends/__init__.py (vendored)
@@ -24,11 +24,6 @@ register_backend(
     "dogpile.cache.backends.memcached",
     "MemcachedBackend",
 )
-register_backend(
-    "dogpile.cache.pymemcache",
-    "dogpile.cache.backends.memcached",
-    "PyMemcacheBackend",
-)
 register_backend(
     "dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend"
 )
@@ -40,8 +35,3 @@ register_backend(
 register_backend(
     "dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend"
 )
-register_backend(
-    "dogpile.cache.redis_sentinel",
-    "dogpile.cache.backends.redis",
-    "RedisSentinelBackend",
-)
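
``register_backend()`` maps a short backend name to a module path and class name, resolved lazily the first time a region is configured with that name; the removed entries simply unregister the pymemcache and Redis Sentinel backends that do not exist in this vendored version. A hedged sketch of how a backend registers under a new name — ``myapp.caching.DictBackend`` is hypothetical::

    # Sketch: registering a hypothetical third-party backend.
    from dogpile.cache import make_region
    from dogpile.cache.region import register_backend

    register_backend("mycache.dict", "myapp.caching", "DictBackend")

    # the name can then be used like any built-in backend
    region = make_region().configure("mycache.dict", expiration_time=300)
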
64  libs/common/dogpile/cache/backends/file.py (vendored)
@@ -9,18 +9,17 @@ Provides backends that deal with local filesystem access.
 from __future__ import with_statement

 from contextlib import contextmanager
-import dbm
 import os
-import threading

-from ..api import BytesBackend
+from ..api import CacheBackend
 from ..api import NO_VALUE
 from ... import util
+from ...util import compat

 __all__ = ["DBMBackend", "FileLock", "AbstractFileLock"]


-class DBMBackend(BytesBackend):
+class DBMBackend(CacheBackend):
     """A file-backend using a dbm file to store keys.

     Basic usage::
@@ -157,6 +156,12 @@ class DBMBackend(BytesBackend):
             util.KeyReentrantMutex.factory,
         )

+        # TODO: make this configurable
+        if compat.py3k:
+            import dbm
+        else:
+            import anydbm as dbm
+        self.dbmmodule = dbm
         self._init_dbm_file()

     def _init_lock(self, argument, suffix, basedir, basefile, wrapper=None):
@@ -180,7 +185,7 @@ class DBMBackend(BytesBackend):
                 exists = True
                 break
         if not exists:
-            fh = dbm.open(self.filename, "c")
+            fh = self.dbmmodule.open(self.filename, "c")
             fh.close()

     def get_mutex(self, key):
@@ -210,50 +215,57 @@ class DBMBackend(BytesBackend):
     @contextmanager
     def _dbm_file(self, write):
         with self._use_rw_lock(write):
-            with dbm.open(self.filename, "w" if write else "r") as dbm_obj:
-                yield dbm_obj
+            dbm = self.dbmmodule.open(self.filename, "w" if write else "r")
+            yield dbm
+            dbm.close()

-    def get_serialized(self, key):
-        with self._dbm_file(False) as dbm_obj:
-            if hasattr(dbm_obj, "get"):
-                value = dbm_obj.get(key, NO_VALUE)
+    def get(self, key):
+        with self._dbm_file(False) as dbm:
+            if hasattr(dbm, "get"):
+                value = dbm.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
-                    value = dbm_obj[key]
+                    value = dbm[key]
                 except KeyError:
                     value = NO_VALUE
+            if value is not NO_VALUE:
+                value = compat.pickle.loads(value)
             return value

-    def get_serialized_multi(self, keys):
-        return [self.get_serialized(key) for key in keys]
+    def get_multi(self, keys):
+        return [self.get(key) for key in keys]

-    def set_serialized(self, key, value):
-        with self._dbm_file(True) as dbm_obj:
-            dbm_obj[key] = value
+    def set(self, key, value):
+        with self._dbm_file(True) as dbm:
+            dbm[key] = compat.pickle.dumps(
+                value, compat.pickle.HIGHEST_PROTOCOL
+            )

-    def set_serialized_multi(self, mapping):
-        with self._dbm_file(True) as dbm_obj:
+    def set_multi(self, mapping):
+        with self._dbm_file(True) as dbm:
             for key, value in mapping.items():
-                dbm_obj[key] = value
+                dbm[key] = compat.pickle.dumps(
+                    value, compat.pickle.HIGHEST_PROTOCOL
+                )

     def delete(self, key):
-        with self._dbm_file(True) as dbm_obj:
+        with self._dbm_file(True) as dbm:
             try:
-                del dbm_obj[key]
+                del dbm[key]
             except KeyError:
                 pass

     def delete_multi(self, keys):
-        with self._dbm_file(True) as dbm_obj:
+        with self._dbm_file(True) as dbm:
             for key in keys:
                 try:
-                    del dbm_obj[key]
+                    del dbm[key]
                 except KeyError:
                     pass


-class AbstractFileLock:
+class AbstractFileLock(object):
     """Coordinate read/write access to a file.

     typically is a file-based lock but doesn't necessarily have to be.
@@ -385,7 +397,7 @@ class FileLock(AbstractFileLock):
     """

     def __init__(self, filename):
-        self._filedescriptor = threading.local()
+        self._filedescriptor = compat.threading.local()
        self.filename = filename

     @util.memoized_property
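
From region code the DBM backend is configured the same way on both sides of this change; only the serialization moved inline (the ``compat.pickle`` calls in ``get()``/``set()`` replace the region-level serializer). A usage sketch along the lines of the backend's "Basic usage" docstring — the filename is illustrative::

    # Sketch: configuring and using the DBM backend.
    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.dbm",
        expiration_time=3600,
        arguments={"filename": "/tmp/cachefile.dbm"},
    )

    region.set("some_key", "some value")
    print(region.get("some_key"))
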
329  libs/common/dogpile/cache/backends/memcached.py (vendored)
@@ -7,42 +7,29 @@ Provides backends for talking to `memcached <http://memcached.org>`_.
 """

 import random
-import threading
 import time
-import typing
-from typing import Any
-from typing import Mapping
-import warnings

 from ..api import CacheBackend
 from ..api import NO_VALUE
 from ... import util
-
-
-if typing.TYPE_CHECKING:
-    import bmemcached
-    import memcache
-    import pylibmc
-    import pymemcache
-else:
-    # delayed import
-    bmemcached = None  # noqa F811
-    memcache = None  # noqa F811
-    pylibmc = None  # noqa F811
-    pymemcache = None  # noqa F811
+from ...util import compat

 __all__ = (
     "GenericMemcachedBackend",
     "MemcachedBackend",
     "PylibmcBackend",
-    "PyMemcacheBackend",
     "BMemcachedBackend",
     "MemcachedLock",
 )


 class MemcachedLock(object):
-    """Simple distributed lock using memcached."""
+    """Simple distributed lock using memcached.
+
+    This is an adaptation of the lock featured at
+    http://amix.dk/blog/post/19386
+
+    """

     def __init__(self, client_fn, key, timeout=0):
         self.client_fn = client_fn
@@ -58,15 +45,11 @@ class MemcachedLock(object):
         elif not wait:
             return False
         else:
-            sleep_time = (((i + 1) * random.random()) + 2**i) / 2.5
+            sleep_time = (((i + 1) * random.random()) + 2 ** i) / 2.5
             time.sleep(sleep_time)
         if i < 15:
             i += 1

-    def locked(self):
-        client = self.client_fn()
-        return client.get(self.key) is not None
-
     def release(self):
         client = self.client_fn()
         client.delete(self.key)
@@ -124,17 +107,10 @@ class GenericMemcachedBackend(CacheBackend):

     """

-    set_arguments: Mapping[str, Any] = {}
+    set_arguments = {}
     """Additional arguments which will be passed
     to the :meth:`set` method."""

-    # No need to override serializer, as all the memcached libraries
-    # handles that themselves. Still, we support customizing the
-    # serializer/deserializer to use better default pickle protocol
-    # or completely different serialization mechanism
-    serializer = None
-    deserializer = None
-
     def __init__(self, arguments):
         self._imports()
         # using a plain threading.local here.  threading.local
@@ -162,7 +138,7 @@ class GenericMemcachedBackend(CacheBackend):
     def _clients(self):
         backend = self

-        class ClientPool(threading.local):
+        class ClientPool(compat.threading.local):
             def __init__(self):
                 self.memcached = backend._create_client()

@@ -197,17 +173,12 @@ class GenericMemcachedBackend(CacheBackend):

     def get_multi(self, keys):
         values = self.client.get_multi(keys)
-
-        return [
-            NO_VALUE if val is None else val
-            for val in [values.get(key, NO_VALUE) for key in keys]
-        ]
+        return [NO_VALUE if key not in values else values[key] for key in keys]

     def set(self, key, value):
         self.client.set(key, value, **self.set_arguments)

     def set_multi(self, mapping):
-        mapping = {key: value for key, value in mapping.items()}
         self.client.set_multi(mapping, **self.set_arguments)

     def delete(self, key):
@@ -217,9 +188,10 @@ class GenericMemcachedBackend(CacheBackend):
         self.client.delete_multi(keys)


-class MemcacheArgs(GenericMemcachedBackend):
+class MemcacheArgs(object):
     """Mixin which provides support for the 'time' argument to set(),
     'min_compress_len' to other methods.

     """

     def __init__(self, arguments):
@@ -235,6 +207,9 @@ class MemcacheArgs(GenericMemcachedBackend):
         super(MemcacheArgs, self).__init__(arguments)


+pylibmc = None
+
+
 class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend for the
     `pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_
@@ -284,6 +259,9 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
         )


+memcache = None
+
+
 class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
     """A backend using the standard
     `Python-memcached <http://www.tummy.com/Community/software/\
@@ -302,39 +280,17 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
         }
     )

-    :param dead_retry: Number of seconds memcached server is considered dead
-     before it is tried again. Will be passed to ``memcache.Client``
-     as the ``dead_retry`` parameter.
-
-     .. versionchanged:: 1.1.8 Moved the ``dead_retry`` argument which was
-        erroneously added to "set_parameters" to
-        be part of the Memcached connection arguments.
-
-    :param socket_timeout: Timeout in seconds for every call to a server.
-     Will be passed to ``memcache.Client`` as the ``socket_timeout``
-     parameter.
-
-     .. versionchanged:: 1.1.8 Moved the ``socket_timeout`` argument which
-        was erroneously added to "set_parameters"
-        to be part of the Memcached connection arguments.
-
     """

-    def __init__(self, arguments):
-        self.dead_retry = arguments.get("dead_retry", 30)
-        self.socket_timeout = arguments.get("socket_timeout", 3)
-        super(MemcachedBackend, self).__init__(arguments)
-
     def _imports(self):
         global memcache
         import memcache  # noqa

     def _create_client(self):
-        return memcache.Client(
-            self.url,
-            dead_retry=self.dead_retry,
-            socket_timeout=self.socket_timeout,
-        )
+        return memcache.Client(self.url)
+
+
+bmemcached = None


 class BMemcachedBackend(GenericMemcachedBackend):
@@ -343,11 +299,9 @@ class BMemcachedBackend(GenericMemcachedBackend):
     python-binary-memcached>`_
     memcached client.

-    This is a pure Python memcached client which includes
-    security features like SASL and SSL/TLS.
-
-    SASL is a standard for adding authentication mechanisms
-    to protocols in a way that is protocol independent.
+    This is a pure Python memcached client which
+    includes the ability to authenticate with a memcached
+    server using SASL.

     A typical configuration using username/password::

@@ -363,25 +317,6 @@ class BMemcachedBackend(GenericMemcachedBackend):
         }
     )

-    A typical configuration using tls_context::
-
-        import ssl
-        from dogpile.cache import make_region
-
-        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
-
-        region = make_region().configure(
-            'dogpile.cache.bmemcached',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'tls_context':ctx,
-            }
-        )
-
-    For advanced ways to configure TLS creating a more complex
-    tls_context visit https://docs.python.org/3/library/ssl.html
-
     Arguments which can be passed to the ``arguments``
     dictionary include:

@@ -389,17 +324,12 @@ class BMemcachedBackend(GenericMemcachedBackend):
      SASL authentication.
     :param password: optional password, will be used for
      SASL authentication.
-    :param tls_context: optional TLS context, will be used for
-     TLS connections.
-
-     .. versionadded:: 1.0.2

     """

     def __init__(self, arguments):
         self.username = arguments.get("username", None)
         self.password = arguments.get("password", None)
-        self.tls_context = arguments.get("tls_context", None)
         super(BMemcachedBackend, self).__init__(arguments)

     def _imports(self):
@@ -425,215 +355,10 @@ class BMemcachedBackend(GenericMemcachedBackend):

     def _create_client(self):
         return self.Client(
-            self.url,
-            username=self.username,
-            password=self.password,
-            tls_context=self.tls_context,
+            self.url, username=self.username, password=self.password
         )

     def delete_multi(self, keys):
         """python-binary-memcached api does not implements delete_multi"""
         for key in keys:
             self.delete(key)
-
-
-class PyMemcacheBackend(GenericMemcachedBackend):
-    """A backend for the
-    `pymemcache <https://github.com/pinterest/pymemcache>`_
-    memcached client.
-
-    A comprehensive, fast, pure Python memcached client
-
-    .. versionadded:: 1.1.2
-
-    pymemcache supports the following features:
-
-    * Complete implementation of the memcached text protocol.
-    * Configurable timeouts for socket connect and send/recv calls.
-    * Access to the "noreply" flag, which can significantly increase
-      the speed of writes.
-    * Flexible, simple approach to serialization and deserialization.
-    * The (optional) ability to treat network and memcached errors as
-      cache misses.
-
-    dogpile.cache uses the ``HashClient`` from pymemcache in order to reduce
-    API differences when compared to other memcached client drivers.
-    This allows the user to provide a single server or a list of memcached
-    servers.
-
-    Arguments which can be passed to the ``arguments``
-    dictionary include:
-
-    :param tls_context: optional TLS context, will be used for
-     TLS connections.
-
-     A typical configuration using tls_context::
-
-        import ssl
-        from dogpile.cache import make_region
-
-        ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
-
-        region = make_region().configure(
-            'dogpile.cache.pymemcache',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'tls_context':ctx,
-            }
-        )
-
-     .. seealso::
-
-        `<https://docs.python.org/3/library/ssl.html>`_ - additional TLS
-        documentation.
-
-    :param serde: optional "serde". Defaults to
-     ``pymemcache.serde.pickle_serde``.
-
-    :param default_noreply: defaults to False. When set to True this flag
-     enables the pymemcache "noreply" feature. See the pymemcache
-     documentation for further details.
-
-    :param socket_keepalive: optional socket keepalive, will be used for
-     TCP keepalive configuration. Use of this parameter requires pymemcache
-     3.5.0 or greater. This parameter
-     accepts a
-     `pymemcache.client.base.KeepAliveOpts
-     <https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html#pymemcache.client.base.KeepaliveOpts>`_
-     object.
-
-     A typical configuration using ``socket_keepalive``::
-
-        from pymemcache import KeepaliveOpts
-        from dogpile.cache import make_region
-
-        # Using the default keepalive configuration
-        socket_keepalive = KeepaliveOpts()
-
-        region = make_region().configure(
-            'dogpile.cache.pymemcache',
-            expiration_time = 3600,
-            arguments = {
-                'url':["127.0.0.1"],
-                'socket_keepalive': socket_keepalive
-            }
-        )
-
-     .. versionadded:: 1.1.4 - added support for ``socket_keepalive``.
-
-    :param enable_retry_client: optional flag to enable retry client
-     mechanisms to handle failure. Defaults to False. When set to ``True``,
-     the :paramref:`.PyMemcacheBackend.retry_attempts` parameter must also
-     be set, along with optional parameters
-     :paramref:`.PyMemcacheBackend.retry_delay`.
-     :paramref:`.PyMemcacheBackend.retry_for`,
-     :paramref:`.PyMemcacheBackend.do_not_retry_for`.
-
-     .. seealso::
-
-        `<https://pymemcache.readthedocs.io/en/latest/getting_started.html#using-the-built-in-retrying-mechanism>`_ -
-        in the pymemcache documentation
-
-     .. versionadded:: 1.1.4
-
-    :param retry_attempts: how many times to attempt an action with
-     pymemcache's retrying wrapper before failing. Must be 1 or above.
-     Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param retry_delay: optional int|float, how many seconds to sleep between
-     each attempt. Used by the retry wrapper. Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param retry_for: optional None|tuple|set|list, what exceptions to
-     allow retries for. Will allow retries for all exceptions if None.
-     Example: ``(MemcacheClientError, MemcacheUnexpectedCloseError)``
-     Accepts any class that is a subclass of Exception.  Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param do_not_retry_for: optional None|tuple|set|list, what
-     exceptions should be retried. Will not block retries for any Exception if
-     None. Example: ``(IOError, MemcacheIllegalInputError)``
-     Accepts any class that is a subclass of Exception.  Defaults to None.
-
-     .. versionadded:: 1.1.4
-
-    :param hashclient_retry_attempts: Amount of times a client should be tried
-     before it is marked dead and removed from the pool in the HashClient's
-     internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    :param hashclient_retry_timeout: Time in seconds that should pass between
-     retry attempts in the HashClient's internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    :param dead_timeout: Time in seconds before attempting to add a node
-     back in the pool in the HashClient's internal mechanisms.
-
-     .. versionadded:: 1.1.5
-
-    """  # noqa E501
-
-    def __init__(self, arguments):
-        super().__init__(arguments)
-
-        self.serde = arguments.get("serde", pymemcache.serde.pickle_serde)
-        self.default_noreply = arguments.get("default_noreply", False)
-        self.tls_context = arguments.get("tls_context", None)
-        self.socket_keepalive = arguments.get("socket_keepalive", None)
-        self.enable_retry_client = arguments.get("enable_retry_client", False)
-        self.retry_attempts = arguments.get("retry_attempts", None)
-        self.retry_delay = arguments.get("retry_delay", None)
-        self.retry_for = arguments.get("retry_for", None)
-        self.do_not_retry_for = arguments.get("do_not_retry_for", None)
-        self.hashclient_retry_attempts = arguments.get(
-            "hashclient_retry_attempts", 2
-        )
-        self.hashclient_retry_timeout = arguments.get(
-            "hashclient_retry_timeout", 1
-        )
-        self.dead_timeout = arguments.get("hashclient_dead_timeout", 60)
-        if (
-            self.retry_delay is not None
-            or self.retry_attempts is not None
-            or self.retry_for is not None
-            or self.do_not_retry_for is not None
-        ) and not self.enable_retry_client:
-            warnings.warn(
-                "enable_retry_client is not set; retry options "
-                "will be ignored"
-            )
-
-    def _imports(self):
-        global pymemcache
-        import pymemcache
-
-    def _create_client(self):
-        _kwargs = {
-            "serde": self.serde,
-            "default_noreply": self.default_noreply,
-            "tls_context": self.tls_context,
-            "retry_attempts": self.hashclient_retry_attempts,
-            "retry_timeout": self.hashclient_retry_timeout,
-            "dead_timeout": self.dead_timeout,
-        }
-        if self.socket_keepalive is not None:
-            _kwargs.update({"socket_keepalive": self.socket_keepalive})
-
-        client = pymemcache.client.hash.HashClient(self.url, **_kwargs)
-        if self.enable_retry_client:
-            return pymemcache.client.retrying.RetryingClient(
-                client,
-                attempts=self.retry_attempts,
-                retry_delay=self.retry_delay,
-                retry_for=self.retry_for,
-                do_not_retry_for=self.do_not_retry_for,
-            )
-
-        return client
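
``MemcachedLock.acquire()`` retries ``client.add()`` with jittered exponential backoff: the sleep time is ``(((i + 1) * random.random()) + 2 ** i) / 2.5``, with the exponent ``i`` capped at 15 so the wait stops growing. A standalone sketch of that schedule, extracted here only for illustration::

    # Sketch of the MemcachedLock backoff schedule (illustration only).
    import random

    def backoff_times(attempts):
        i = 0
        for _ in range(attempts):
            # jittered exponential backoff, exponent capped at 15
            yield (((i + 1) * random.random()) + 2 ** i) / 2.5
            if i < 15:
                i += 1

    for t in backoff_times(5):
        print("sleep %.2fs" % t)
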
27  libs/common/dogpile/cache/backends/memory.py (vendored)
@@ -10,10 +10,9 @@ places the value as given into the dictionary.

 """

 from ..api import CacheBackend
-from ..api import DefaultSerialization
 from ..api import NO_VALUE
+from ...util.compat import pickle


 class MemoryBackend(CacheBackend):
@@ -50,20 +49,36 @@ class MemoryBackend(CacheBackend):

     """

+    pickle_values = False
+
     def __init__(self, arguments):
         self._cache = arguments.pop("cache_dict", {})

     def get(self, key):
-        return self._cache.get(key, NO_VALUE)
+        value = self._cache.get(key, NO_VALUE)
+        if value is not NO_VALUE and self.pickle_values:
+            value = pickle.loads(value)
+        return value

     def get_multi(self, keys):
-        return [self._cache.get(key, NO_VALUE) for key in keys]
+        ret = [self._cache.get(key, NO_VALUE) for key in keys]
+        if self.pickle_values:
+            ret = [
+                pickle.loads(value) if value is not NO_VALUE else value
+                for value in ret
+            ]
+        return ret

     def set(self, key, value):
+        if self.pickle_values:
+            value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
         self._cache[key] = value

     def set_multi(self, mapping):
+        pickle_values = self.pickle_values
         for key, value in mapping.items():
+            if pickle_values:
+                value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
             self._cache[key] = value

     def delete(self, key):
@@ -74,7 +89,7 @@ class MemoryBackend(CacheBackend):
         self._cache.pop(key, None)


-class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
+class MemoryPickleBackend(MemoryBackend):
     """A backend that uses a plain dictionary, but serializes objects on
     :meth:`.MemoryBackend.set` and deserializes :meth:`.MemoryBackend.get`.

@@ -105,3 +120,5 @@ class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
     .. versionadded:: 0.5.3

     """
+
+    pickle_values = True
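
With ``pickle_values = True`` the restored backend round-trips every value through pickle, so mutable payloads come back as copies rather than shared references — which is the point of ``MemoryPickleBackend`` versus the plain ``MemoryBackend``. A quick sketch of that behavior::

    # Sketch: MemoryPickleBackend returns copies of cached objects.
    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory_pickle")

    payload = {"count": 1}
    region.set("k", payload)
    payload["count"] = 99      # mutate the original after caching

    cached = region.get("k")
    print(cached["count"])     # still 1: the stored copy was pickled
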
3  libs/common/dogpile/cache/backends/null.py (vendored)
@@ -24,9 +24,6 @@ class NullLock(object):
     def release(self):
         pass

-    def locked(self):
-        return False
-

 class NullBackend(CacheBackend):
     """A "null" backend that effectively disables all cache operations.
273  libs/common/dogpile/cache/backends/redis.py (vendored)
@@ -8,23 +8,20 @@ Provides backends for talking to `Redis <http://redis.io>`_.

 from __future__ import absolute_import

-import typing
 import warnings

-from ..api import BytesBackend
+from ..api import CacheBackend
 from ..api import NO_VALUE
+from ...util.compat import pickle
+from ...util.compat import u

-if typing.TYPE_CHECKING:
-    import redis
-else:
-    # delayed import
-    redis = None  # noqa F811
+redis = None

-__all__ = ("RedisBackend", "RedisSentinelBackend")
+__all__ = ("RedisBackend",)


-class RedisBackend(BytesBackend):
-    r"""A `Redis <http://redis.io/>`_ backend, using the
+class RedisBackend(CacheBackend):
+    """A `Redis <http://redis.io/>`_ backend, using the
     `redis-py <http://pypi.python.org/pypi/redis/>`_ backend.

     Example configuration::
@@ -49,10 +46,14 @@ class RedisBackend(BytesBackend):
     :param url: string. If provided, will override separate host/port/db
      params.  The format is that accepted by ``StrictRedis.from_url()``.

+     .. versionadded:: 0.4.1
+
     :param host: string, default is ``localhost``.

     :param password: string, default is no password.

+     .. versionadded:: 0.4.1
+
     :param port: integer, default is ``6379``.

     :param db: integer, default is ``0``.
@@ -70,31 +71,33 @@ class RedisBackend(BytesBackend):
     Redis should expire it. This argument is only valid when
     ``distributed_lock`` is ``True``.

+     .. versionadded:: 0.5.0
+
     :param socket_timeout: float, seconds for socket timeout.
      Default is None (no timeout).

+     .. versionadded:: 0.5.4
+
     :param lock_sleep: integer, number of seconds to sleep when failed to
      acquire a lock.  This argument is only valid when
      ``distributed_lock`` is ``True``.

+     .. versionadded:: 0.5.0
+
     :param connection_pool: ``redis.ConnectionPool`` object.  If provided,
      this object supersedes other connection arguments passed to the
      ``redis.StrictRedis`` instance, including url and/or host as well as
      socket_timeout, and will be passed to ``redis.StrictRedis`` as the
      source of connectivity.

+     .. versionadded:: 0.5.4
+
     :param thread_local_lock: bool, whether a thread-local Redis lock object
      should be used. This is the default, but is not compatible with
      asynchronous runners, as they run in a different thread than the one
      used to create the lock.

-    :param connection_kwargs: dict, additional keyword arguments are passed
-     along to the
-     ``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
-     directly, including parameters like ``ssl``, ``ssl_certfile``,
-     ``charset``, etc.
-
-    .. versionadded:: 1.1.6  Added ``connection_kwargs`` parameter.
+     .. versionadded:: 0.9.1

     """
@@ -106,12 +109,12 @@ class RedisBackend(BytesBackend):
         self.password = arguments.pop("password", None)
         self.port = arguments.pop("port", 6379)
         self.db = arguments.pop("db", 0)
-        self.distributed_lock = arguments.pop("distributed_lock", False)
+        self.distributed_lock = arguments.get("distributed_lock", False)
         self.socket_timeout = arguments.pop("socket_timeout", None)
-        self.lock_timeout = arguments.pop("lock_timeout", None)
-        self.lock_sleep = arguments.pop("lock_sleep", 0.1)
-        self.thread_local_lock = arguments.pop("thread_local_lock", True)
-        self.connection_kwargs = arguments.pop("connection_kwargs", {})
-
+        self.lock_timeout = arguments.get("lock_timeout", None)
+        self.lock_sleep = arguments.get("lock_sleep", 0.1)
+        self.thread_local_lock = arguments.get("thread_local_lock", True)

         if self.distributed_lock and self.thread_local_lock:
             warnings.warn(
@@ -120,8 +123,8 @@ class RedisBackend(BytesBackend):
             )

         self.redis_expiration_time = arguments.pop("redis_expiration_time", 0)
-        self.connection_pool = arguments.pop("connection_pool", None)
-        self._create_client()
+        self.connection_pool = arguments.get("connection_pool", None)
+        self.client = self._create_client()

     def _imports(self):
         # defer imports until backend is used
@@ -133,207 +136,73 @@ class RedisBackend(BytesBackend):
             # the connection pool already has all other connection
             # options present within, so here we disregard socket_timeout
             # and others.
-            self.writer_client = redis.StrictRedis(
-                connection_pool=self.connection_pool
-            )
-            self.reader_client = self.writer_client
-        else:
-            args = {}
-            args.update(self.connection_kwargs)
-            if self.socket_timeout:
-                args["socket_timeout"] = self.socket_timeout
+            return redis.StrictRedis(connection_pool=self.connection_pool)

-            if self.url is not None:
-                args.update(url=self.url)
-                self.writer_client = redis.StrictRedis.from_url(**args)
-                self.reader_client = self.writer_client
-            else:
-                args.update(
-                    host=self.host,
-                    password=self.password,
-                    port=self.port,
-                    db=self.db,
-                )
-                self.writer_client = redis.StrictRedis(**args)
-                self.reader_client = self.writer_client
+        args = {}
+        if self.socket_timeout:
+            args["socket_timeout"] = self.socket_timeout
+
+        if self.url is not None:
+            args.update(url=self.url)
+            return redis.StrictRedis.from_url(**args)
+        else:
+            args.update(
+                host=self.host,
+                password=self.password,
+                port=self.port,
+                db=self.db,
+            )
+            return redis.StrictRedis(**args)

     def get_mutex(self, key):
         if self.distributed_lock:
-            return _RedisLockWrapper(
-                self.writer_client.lock(
-                    "_lock{0}".format(key),
-                    timeout=self.lock_timeout,
-                    sleep=self.lock_sleep,
-                    thread_local=self.thread_local_lock,
-                )
+            return self.client.lock(
+                u("_lock{0}").format(key),
+                timeout=self.lock_timeout,
+                sleep=self.lock_sleep,
+                thread_local=self.thread_local_lock,
             )
         else:
             return None

-    def get_serialized(self, key):
-        value = self.reader_client.get(key)
+    def get(self, key):
+        value = self.client.get(key)
         if value is None:
             return NO_VALUE
-        return value
+        return pickle.loads(value)

-    def get_serialized_multi(self, keys):
+    def get_multi(self, keys):
         if not keys:
             return []
-        values = self.reader_client.mget(keys)
-        return [v if v is not None else NO_VALUE for v in values]
+        values = self.client.mget(keys)
+        return [pickle.loads(v) if v is not None else NO_VALUE for v in values]

-    def set_serialized(self, key, value):
+    def set(self, key, value):
         if self.redis_expiration_time:
-            self.writer_client.setex(key, self.redis_expiration_time, value)
+            self.client.setex(
+                key,
+                self.redis_expiration_time,
+                pickle.dumps(value, pickle.HIGHEST_PROTOCOL),
+            )
         else:
-            self.writer_client.set(key, value)
+            self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))

-    def set_serialized_multi(self, mapping):
+    def set_multi(self, mapping):
+        mapping = dict(
+            (k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL))
+            for k, v in mapping.items()
+        )
+
         if not self.redis_expiration_time:
-            self.writer_client.mset(mapping)
+            self.client.mset(mapping)
         else:
-            pipe = self.writer_client.pipeline()
+            pipe = self.client.pipeline()
             for key, value in mapping.items():
                 pipe.setex(key, self.redis_expiration_time, value)
             pipe.execute()

     def delete(self, key):
-        self.writer_client.delete(key)
+        self.client.delete(key)

     def delete_multi(self, keys):
-        self.writer_client.delete(*keys)
-
-
-class _RedisLockWrapper:
-    __slots__ = ("mutex", "__weakref__")
-
-    def __init__(self, mutex: typing.Any):
-        self.mutex = mutex
-
-    def acquire(self, wait: bool = True) -> typing.Any:
-        return self.mutex.acquire(blocking=wait)
-
-    def release(self) -> typing.Any:
-        return self.mutex.release()
-
-    def locked(self) -> bool:
-        return self.mutex.locked()  # type: ignore
-
-
-class RedisSentinelBackend(RedisBackend):
-    """A `Redis <http://redis.io/>`_ backend, using the
-    `redis-py <http://pypi.python.org/pypi/redis/>`_ backend.
-    It will use the Sentinel of a Redis cluster.
-
-    .. versionadded:: 1.0.0
-
-    Example configuration::
-
-        from dogpile.cache import make_region
-
-        region = make_region().configure(
-            'dogpile.cache.redis_sentinel',
-            arguments = {
-                'sentinels': [
-                    ['redis_sentinel_1', 26379],
-                    ['redis_sentinel_2', 26379]
-                ],
-                'db': 0,
-                'redis_expiration_time': 60*60*2,   # 2 hours
-                'distributed_lock': True,
-                'thread_local_lock': False
-            }
-        )
-
-
-    Arguments accepted in the arguments dictionary:
-
-    :param db: integer, default is ``0``.
-
-    :param redis_expiration_time: integer, number of seconds after setting
-     a value that Redis should expire it.  This should be larger than dogpile's
-     cache expiration.  By default no expiration is set.
-
-    :param distributed_lock: boolean, when True, will use a
-     redis-lock as the dogpile lock. Use this when multiple processes will be
-     talking to the same redis instance. When False, dogpile will
-     coordinate on a regular threading mutex, Default is True.
-
-    :param lock_timeout: integer, number of seconds after acquiring a lock that
-     Redis should expire it.  This argument is only valid when
-     ``distributed_lock`` is ``True``.
-
-    :param socket_timeout: float, seconds for socket timeout.
-     Default is None (no timeout).
-
-    :param sentinels: is a list of sentinel nodes. Each node is represented by
-     a pair (hostname, port).
-     Default is None (not in sentinel mode).
-
-    :param service_name: str, the service name.
-     Default is 'mymaster'.
-
-    :param sentinel_kwargs: is a dictionary of connection arguments used when
-     connecting to sentinel instances. Any argument that can be passed to
-     a normal Redis connection can be specified here.
-     Default is {}.
-
-    :param connection_kwargs: dict, additional keyword arguments are passed
-     along to the
-     ``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
-     directly, including parameters like ``ssl``, ``ssl_certfile``,
-     ``charset``, etc.
-
-    :param lock_sleep: integer, number of seconds to sleep when failed to
-     acquire a lock.  This argument is only valid when
-     ``distributed_lock`` is ``True``.
-
-    :param thread_local_lock: bool, whether a thread-local Redis lock object
-     should be used. This is the default, but is not compatible with
-     asynchronous runners, as they run in a different thread than the one
-     used to create the lock.
-
-    """
-
-    def __init__(self, arguments):
-        arguments = arguments.copy()
-
-        self.sentinels = arguments.pop("sentinels", None)
-        self.service_name = arguments.pop("service_name", "mymaster")
-        self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {})
-
-        super().__init__(
-            arguments={
-                "distributed_lock": True,
-                "thread_local_lock": False,
-                **arguments,
-            }
-        )
-
-    def _imports(self):
-        # defer imports until backend is used
-        global redis
-        import redis.sentinel  # noqa
-
-    def _create_client(self):
-        sentinel_kwargs = {}
-        sentinel_kwargs.update(self.sentinel_kwargs)
-        sentinel_kwargs.setdefault("password", self.password)
-
-        connection_kwargs = {}
-        connection_kwargs.update(self.connection_kwargs)
-        connection_kwargs.setdefault("password", self.password)
-
-        if self.db is not None:
-            connection_kwargs.setdefault("db", self.db)
-            sentinel_kwargs.setdefault("db", self.db)
-        if self.socket_timeout is not None:
-            connection_kwargs.setdefault("socket_timeout", self.socket_timeout)
-
-        sentinel = redis.sentinel.Sentinel(
-            self.sentinels,
-            sentinel_kwargs=sentinel_kwargs,
-            **connection_kwargs,
-        )
-        self.writer_client = sentinel.master_for(self.service_name)
-        self.reader_client = sentinel.slave_for(self.service_name)
+        self.client.delete(*keys)
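
With ``distributed_lock`` enabled, ``get_mutex()`` hands dogpile a redis-py lock keyed as ``_lock<key>``, so multiple processes serialize regeneration of the same cache key instead of each using a local threading mutex. A hedged configuration sketch along the lines of the docstring's example — host and expiration values are illustrative::

    # Sketch: cross-process dogpile coordination via the Redis lock.
    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.redis",
        expiration_time=3600,
        arguments={
            "host": "localhost",
            "port": 6379,
            "db": 0,
            "redis_expiration_time": 3600 * 2,  # should exceed expiration_time
            "distributed_lock": True,
        },
    )
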
45  libs/common/dogpile/cache/proxy.py (vendored)
|
@@ -10,16 +10,7 @@ base backend.

"""

from typing import Mapping
from typing import Optional
from typing import Sequence

from .api import BackendFormatted
from .api import BackendSetType
from .api import CacheBackend
from .api import CacheMutex
from .api import KeyType
from .api import SerializedReturnType


class ProxyBackend(CacheBackend):

@@ -64,11 +55,11 @@ class ProxyBackend(CacheBackend):

    """

    def __init__(self, *arg, **kw):
        pass
    def __init__(self, *args, **kwargs):
        self.proxied = None

    def wrap(self, backend: CacheBackend) -> "ProxyBackend":
        """Take a backend as an argument and setup the self.proxied property.
    def wrap(self, backend):
        """ Take a backend as an argument and setup the self.proxied property.
        Return an object that can be used as a backend by a
        :class:`.CacheRegion` object.
        """

@@ -82,37 +73,23 @@ class ProxyBackend(CacheBackend):
    # Delegate any functions that are not already overridden to
    # the proxied backend
    #
    def get(self, key: KeyType) -> BackendFormatted:
    def get(self, key):
        return self.proxied.get(key)

    def set(self, key: KeyType, value: BackendSetType) -> None:
    def set(self, key, value):
        self.proxied.set(key, value)

    def delete(self, key: KeyType) -> None:
    def delete(self, key):
        self.proxied.delete(key)

    def get_multi(self, keys: Sequence[KeyType]) -> Sequence[BackendFormatted]:
    def get_multi(self, keys):
        return self.proxied.get_multi(keys)

    def set_multi(self, mapping: Mapping[KeyType, BackendSetType]) -> None:
    def set_multi(self, mapping):
        self.proxied.set_multi(mapping)

    def delete_multi(self, keys: Sequence[KeyType]) -> None:
    def delete_multi(self, keys):
        self.proxied.delete_multi(keys)

    def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
    def get_mutex(self, key):
        return self.proxied.get_mutex(key)

    def get_serialized(self, key: KeyType) -> SerializedReturnType:
        return self.proxied.get_serialized(key)

    def get_serialized_multi(
        self, keys: Sequence[KeyType]
    ) -> Sequence[SerializedReturnType]:
        return self.proxied.get_serialized_multi(keys)

    def set_serialized(self, key: KeyType, value: bytes) -> None:
        self.proxied.set_serialized(key, value)

    def set_serialized_multi(self, mapping: Mapping[KeyType, bytes]) -> None:
        self.proxied.set_serialized_multi(mapping)
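The delegation pattern above means a subclass only overrides the
operations it cares about; everything else falls through to
``self.proxied``.  A small sketch (the logger name is arbitrary)::

    import logging

    from dogpile.cache import make_region
    from dogpile.cache.proxy import ProxyBackend

    log = logging.getLogger("cache.proxy")


    class LoggingProxy(ProxyBackend):
        def set(self, key, value):
            log.debug("caching %r", key)
            self.proxied.set(key, value)


    region = make_region().configure(
        "dogpile.cache.memory",
        wrap=[LoggingProxy],  # classes or instances are both accepted
    )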
554 libs/common/dogpile/cache/region.py vendored
@@ -4,35 +4,16 @@ import contextlib
import datetime
from functools import partial
from functools import wraps
import json
import logging
from numbers import Number
import threading
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import Union

from decorator import decorate

from . import exception
from .api import BackendArguments
from .api import BackendFormatted
from .api import CachedValue
from .api import CacheMutex
from .api import CacheReturnType
from .api import KeyType
from .api import MetaDataType
from .api import NO_VALUE
from .api import SerializedReturnType
from .api import Serializer
from .api import ValuePayload
from .backends import _backend_loader
from .backends import register_backend  # noqa
from .proxy import ProxyBackend

@@ -42,11 +23,12 @@ from .util import repr_obj
from .. import Lock
from .. import NeedRegenerationException
from ..util import coerce_string_conf
from ..util import compat
from ..util import memoized_property
from ..util import NameRegistry
from ..util import PluginLoader

value_version = 2
value_version = 1
"""An integer placed in the :class:`.CachedValue`
so that new versions of dogpile.cache can detect cached
values from a previous, backwards-incompatible version.

@@ -56,20 +38,7 @@ values from a previous, backwards-incompatible version.
log = logging.getLogger(__name__)


AsyncCreator = Callable[
    ["CacheRegion", KeyType, Callable[[], ValuePayload], CacheMutex], None
]

ExpirationTimeCallable = Callable[[], float]

ToStr = Callable[[Any], str]

FunctionKeyGenerator = Callable[..., Callable[..., KeyType]]

FunctionMultiKeyGenerator = Callable[..., Callable[..., Sequence[KeyType]]]


class RegionInvalidationStrategy:
class RegionInvalidationStrategy(object):
    """Region invalidation strategy interface

    Implement this interface and pass implementation instance

@@ -141,7 +110,7 @@ class RegionInvalidationStrategy:

    """

    def invalidate(self, hard: bool = True) -> None:
    def invalidate(self, hard=True):
        """Region invalidation.

        :class:`.CacheRegion` propagated call.

@@ -153,7 +122,7 @@ class RegionInvalidationStrategy:

        raise NotImplementedError()

    def is_hard_invalidated(self, timestamp: float) -> bool:
    def is_hard_invalidated(self, timestamp):
        """Check timestamp to determine if it was hard invalidated.

        :return: Boolean. True if ``timestamp`` is older than

@@ -164,7 +133,7 @@ class RegionInvalidationStrategy:

        raise NotImplementedError()

    def is_soft_invalidated(self, timestamp: float) -> bool:
    def is_soft_invalidated(self, timestamp):
        """Check timestamp to determine if it was soft invalidated.

        :return: Boolean. True if ``timestamp`` is older than

@@ -175,7 +144,7 @@ class RegionInvalidationStrategy:

        raise NotImplementedError()

    def is_invalidated(self, timestamp: float) -> bool:
    def is_invalidated(self, timestamp):
        """Check timestamp to determine if it was invalidated.

        :return: Boolean. True if ``timestamp`` is older than

@@ -185,7 +154,7 @@ class RegionInvalidationStrategy:

        raise NotImplementedError()

    def was_soft_invalidated(self) -> bool:
    def was_soft_invalidated(self):
        """Indicate the region was invalidated in soft mode.

        :return: Boolean. True if region was invalidated in soft mode.

@@ -194,7 +163,7 @@ class RegionInvalidationStrategy:

        raise NotImplementedError()

    def was_hard_invalidated(self) -> bool:
    def was_hard_invalidated(self):
        """Indicate the region was invalidated in hard mode.

        :return: Boolean. True if region was invalidated in hard mode.

@@ -209,27 +178,27 @@ class DefaultInvalidationStrategy(RegionInvalidationStrategy):
        self._is_hard_invalidated = None
        self._invalidated = None

    def invalidate(self, hard: bool = True) -> None:
    def invalidate(self, hard=True):
        self._is_hard_invalidated = bool(hard)
        self._invalidated = time.time()

    def is_invalidated(self, timestamp: float) -> bool:
    def is_invalidated(self, timestamp):
        return self._invalidated is not None and timestamp < self._invalidated

    def was_hard_invalidated(self) -> bool:
    def was_hard_invalidated(self):
        return self._is_hard_invalidated is True

    def is_hard_invalidated(self, timestamp: float) -> bool:
    def is_hard_invalidated(self, timestamp):
        return self.was_hard_invalidated() and self.is_invalidated(timestamp)

    def was_soft_invalidated(self) -> bool:
    def was_soft_invalidated(self):
        return self._is_hard_invalidated is False

    def is_soft_invalidated(self, timestamp: float) -> bool:
    def is_soft_invalidated(self, timestamp):
        return self.was_soft_invalidated() and self.is_invalidated(timestamp)
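Since the newer :meth:`.CacheRegion.configure` signature shown below
accepts a ``region_invalidator``, a custom strategy can replace the
default time-based one.  A rough sketch that only supports hard
invalidation::

    import time

    from dogpile.cache.region import RegionInvalidationStrategy


    class HardOnlyInvalidation(RegionInvalidationStrategy):
        def __init__(self):
            self._invalidated = None

        def invalidate(self, hard=True):
            self._invalidated = time.time()

        def is_invalidated(self, timestamp):
            return (
                self._invalidated is not None
                and timestamp < self._invalidated
            )

        def is_hard_invalidated(self, timestamp):
            return self.is_invalidated(timestamp)

        def is_soft_invalidated(self, timestamp):
            return False

        def was_hard_invalidated(self):
            return self._invalidated is not None

        def was_soft_invalidated(self):
            return False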
class CacheRegion:
class CacheRegion(object):
    r"""A front end to a particular cache backend.

    :param name: Optional, a string name for the region.

@@ -315,21 +284,6 @@ class CacheRegion:
      to convert non-string or Unicode keys to bytestrings, which is
      needed when using a backend such as bsddb or dbm under Python 2.x
      in conjunction with Unicode keys.

    :param serializer: function which will be applied to all values before
      passing to the backend.  Defaults to ``None``, in which case the
      serializer recommended by the backend will be used.  Typical
      serializers include ``pickle.dumps`` and ``json.dumps``.

      .. versionadded:: 1.1.0

    :param deserializer: function which will be applied to all values returned
      by the backend.  Defaults to ``None``, in which case the
      deserializer recommended by the backend will be used.  Typical
      deserializers include ``pickle.loads`` and ``json.loads``.

      .. versionadded:: 1.1.0

    :param async_creation_runner: A callable that, when specified,
      will be passed to and called by dogpile.lock when
      there is a stale value present in the cache.  It will be passed the
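With those two hooks, a region can store JSON instead of pickle; a small
sketch (the encode/decode wrappers are there because the serializer is
expected to produce bytes, per the annotated signature below)::

    import json

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.memory",
        serializer=lambda value: json.dumps(value).encode("utf-8"),
        deserializer=lambda data: json.loads(data),
    )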
@@ -385,37 +339,31 @@ class CacheRegion:

    def __init__(
        self,
        name: Optional[str] = None,
        function_key_generator: FunctionKeyGenerator = function_key_generator,
        function_multi_key_generator: FunctionMultiKeyGenerator = function_multi_key_generator,  # noqa E501
        key_mangler: Optional[Callable[[KeyType], KeyType]] = None,
        serializer: Optional[Callable[[ValuePayload], bytes]] = None,
        deserializer: Optional[Callable[[bytes], ValuePayload]] = None,
        async_creation_runner: Optional[AsyncCreator] = None,
        name=None,
        function_key_generator=function_key_generator,
        function_multi_key_generator=function_multi_key_generator,
        key_mangler=None,
        async_creation_runner=None,
    ):
        """Construct a new :class:`.CacheRegion`."""
        self.name = name
        self.function_key_generator = function_key_generator
        self.function_multi_key_generator = function_multi_key_generator
        self.key_mangler = self._user_defined_key_mangler = key_mangler
        self.serializer = self._user_defined_serializer = serializer
        self.deserializer = self._user_defined_deserializer = deserializer
        self.async_creation_runner = async_creation_runner
        self.region_invalidator: RegionInvalidationStrategy = (
            DefaultInvalidationStrategy()
        )
        self.region_invalidator = DefaultInvalidationStrategy()

    def configure(
        self,
        backend: str,
        expiration_time: Optional[Union[float, datetime.timedelta]] = None,
        arguments: Optional[BackendArguments] = None,
        _config_argument_dict: Optional[Mapping[str, Any]] = None,
        _config_prefix: Optional[str] = None,
        wrap: Sequence[Union[ProxyBackend, Type[ProxyBackend]]] = (),
        replace_existing_backend: bool = False,
        region_invalidator: Optional[RegionInvalidationStrategy] = None,
    ) -> "CacheRegion":
        backend,
        expiration_time=None,
        arguments=None,
        _config_argument_dict=None,
        _config_prefix=None,
        wrap=None,
        replace_existing_backend=False,
        region_invalidator=None,
    ):
        """Configure a :class:`.CacheRegion`.

        The :class:`.CacheRegion` itself

@@ -466,7 +414,7 @@ class CacheRegion:

        .. versionadded:: 0.6.2

        """
        """

        if "backend" in self.__dict__ and not replace_existing_backend:
            raise exception.RegionAlreadyConfigured(

@@ -490,12 +438,12 @@ class CacheRegion:
        else:
            self.backend = backend_cls(arguments or {})

        self.expiration_time: Union[float, None]

        if not expiration_time or isinstance(expiration_time, Number):
            self.expiration_time = cast(Union[None, float], expiration_time)
            self.expiration_time = expiration_time
        elif isinstance(expiration_time, datetime.timedelta):
            self.expiration_time = int(expiration_time.total_seconds())
            self.expiration_time = int(
                compat.timedelta_total_seconds(expiration_time)
            )
        else:
            raise exception.ValidationError(
                "expiration_time is not a number or timedelta."
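So either form works; a one-hour expiration can be spelled in seconds or
as a timedelta::

    import datetime

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.memory",
        expiration_time=datetime.timedelta(hours=1),  # stored as 3600
    )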
@@ -504,12 +452,6 @@ class CacheRegion:
        if not self._user_defined_key_mangler:
            self.key_mangler = self.backend.key_mangler

        if not self._user_defined_serializer:
            self.serializer = self.backend.serializer

        if not self._user_defined_deserializer:
            self.deserializer = self.backend.deserializer

        self._lock_registry = NameRegistry(self._create_mutex)

        if getattr(wrap, "__iter__", False):

@@ -521,28 +463,26 @@ class CacheRegion:

        return self

    def wrap(self, proxy: Union[ProxyBackend, Type[ProxyBackend]]) -> None:
        """Takes a ProxyBackend instance or class and wraps the
        attached backend."""
    def wrap(self, proxy):
        """ Takes a ProxyBackend instance or class and wraps the
        attached backend. """

        # if we were passed a type rather than an instance then
        # initialize it.
        if isinstance(proxy, type):
            proxy_instance = proxy()
        else:
            proxy_instance = proxy
        if type(proxy) == type:
            proxy = proxy()

        if not isinstance(proxy_instance, ProxyBackend):
        if not issubclass(type(proxy), ProxyBackend):
            raise TypeError(
                "%r is not a valid ProxyBackend" % (proxy_instance,)
                "Type %s is not a valid ProxyBackend" % type(proxy)
            )

        self.backend = proxy_instance.wrap(self.backend)
        self.backend = proxy.wrap(self.backend)

    def _mutex(self, key):
        return self._lock_registry.get(key)

    class _LockWrapper(CacheMutex):
    class _LockWrapper(object):
        """weakref-capable wrapper for threading.Lock"""

        def __init__(self):

@@ -554,9 +494,6 @@ class CacheRegion:
        def release(self):
            self.lock.release()

        def locked(self):
            return self.lock.locked()

    def _create_mutex(self, key):
        mutex = self.backend.get_mutex(key)
        if mutex is not None:

@@ -762,7 +699,7 @@ class CacheRegion:

        if self.key_mangler:
            key = self.key_mangler(key)
        value = self._get_from_backend(key)
        value = self.backend.get(key)
        value = self._unexpired_value_fn(expiration_time, ignore_expiration)(
            value
        )

@@ -830,10 +767,10 @@ class CacheRegion:
        if not keys:
            return []

        if self.key_mangler is not None:
            keys = [self.key_mangler(key) for key in keys]
        if self.key_mangler:
            keys = list(map(lambda key: self.key_mangler(key), keys))

        backend_values = self._get_multi_from_backend(keys)
        backend_values = self.backend.get_multi(keys)

        _unexpired_value_fn = self._unexpired_value_fn(
            expiration_time, ignore_expiration
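A cache miss is reported as the ``NO_VALUE`` token rather than by
raising, so callers test with ``is``; assuming a configured ``region``::

    from dogpile.cache.api import NO_VALUE

    value = region.get("user:1")
    if value is NO_VALUE:
        ...  # miss: compute or fall through

    # get_multi returns one entry per key, in order, with NO_VALUE
    # standing in for any key that is absent or expired
    values = region.get_multi(["user:1", "user:2"])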
@@ -868,26 +805,14 @@ class CacheRegion:

        return True

    def key_is_locked(self, key: KeyType) -> bool:
        """Return True if a particular cache key is currently being generated
        within the dogpile lock.

        .. versionadded:: 1.1.2

        """
        mutex = self._mutex(key)
        locked: bool = mutex.locked()
        return locked

    def get_or_create(
        self,
        key: KeyType,
        creator: Callable[..., ValuePayload],
        expiration_time: Optional[float] = None,
        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
        creator_args: Optional[Tuple[Any, Mapping[str, Any]]] = None,
    ) -> ValuePayload:
        key,
        creator,
        expiration_time=None,
        should_cache_fn=None,
        creator_args=None,
    ):
        """Return a cached value based on the given key.

        If the value does not exist or is considered to be expired

@@ -974,17 +899,12 @@ class CacheRegion:
            key = self.key_mangler(key)

        def get_value():
            value = self._get_from_backend(key)
            value = self.backend.get(key)
            if self._is_cache_miss(value, orig_key):
                raise NeedRegenerationException()

            ct = cast(CachedValue, value).metadata["ct"]
            ct = value.metadata["ct"]
            if self.region_invalidator.is_soft_invalidated(ct):
                if expiration_time is None:
                    raise exception.DogpileCacheException(
                        "Non-None expiration time required "
                        "for soft invalidation"
                    )
                ct = time.time() - expiration_time - 0.0001

            return value.payload, ct
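Typical use of :meth:`.CacheRegion.get_or_create`, assuming a configured
``region`` and a hypothetical ``load_user()`` loader::

    def load_user(user_id):
        ...  # expensive lookup

    user = region.get_or_create(
        "user:1",
        lambda: load_user(1),
        expiration_time=300,
        # skip caching failed lookups
        should_cache_fn=lambda value: value is not None,
    )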
@@ -999,42 +919,37 @@ class CacheRegion:
            created_value = creator()
            value = self._value(created_value)

            if (
                expiration_time is None
                and self.region_invalidator.was_soft_invalidated()
            ):
                raise exception.DogpileCacheException(
                    "Non-None expiration time required "
                    "for soft invalidation"
                )

            if not should_cache_fn or should_cache_fn(created_value):
                self._set_cached_value_to_backend(key, value)
                self.backend.set(key, value)

            return value.payload, value.metadata["ct"]

        if expiration_time is None:
            expiration_time = self.expiration_time

        if (
            expiration_time is None
            and self.region_invalidator.was_soft_invalidated()
        ):
            raise exception.DogpileCacheException(
                "Non-None expiration time required " "for soft invalidation"
            )

        if expiration_time == -1:
            expiration_time = None

        async_creator: Optional[Callable[[CacheMutex], AsyncCreator]]
        if self.async_creation_runner:
            acr = self.async_creation_runner

            def async_creator(mutex):
                if creator_args:

                    ca = creator_args

                    @wraps(creator)
                    def go():
                        return creator(*ca[0], **ca[1])
                        return creator(*creator_args[0], **creator_args[1])

                else:
                    go = creator
                return acr(self, orig_key, go, mutex)
                return self.async_creation_runner(self, orig_key, go, mutex)

        else:
            async_creator = None

@@ -1049,12 +964,8 @@ class CacheRegion:
        return value

    def get_or_create_multi(
        self,
        keys: Sequence[KeyType],
        creator: Callable[[], ValuePayload],
        expiration_time: Optional[float] = None,
        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
    ) -> Sequence[ValuePayload]:
        self, keys, creator, expiration_time=None, should_cache_fn=None
    ):
        """Return a sequence of cached values based on a sequence of keys.

        The behavior for generation of values based on keys corresponds

@@ -1109,29 +1020,34 @@ class CacheRegion:
                # _has_value() will return False.
                return value.payload, 0
            else:
                ct = cast(CachedValue, value).metadata["ct"]
                ct = value.metadata["ct"]
                if self.region_invalidator.is_soft_invalidated(ct):
                    if expiration_time is None:
                        raise exception.DogpileCacheException(
                            "Non-None expiration time required "
                            "for soft invalidation"
                        )
                    ct = time.time() - expiration_time - 0.0001

                return value.payload, ct

        def gen_value() -> ValuePayload:
        def gen_value():
            raise NotImplementedError()

        def async_creator(mutexes, key, mutex):
        def async_creator(key, mutex):
            mutexes[key] = mutex

        if expiration_time is None:
            expiration_time = self.expiration_time

        if (
            expiration_time is None
            and self.region_invalidator.was_soft_invalidated()
        ):
            raise exception.DogpileCacheException(
                "Non-None expiration time required " "for soft invalidation"
            )

        if expiration_time == -1:
            expiration_time = None

        mutexes = {}

        sorted_unique_keys = sorted(set(keys))

        if self.key_mangler:
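Typical use of :meth:`.CacheRegion.get_or_create_multi`; the creator is
invoked once with only the keys that missed and must return one value per
key, in order (``expensive_lookup()`` is hypothetical)::

    def load_values(*keys):
        return [expensive_lookup(k) for k in keys]

    values = region.get_or_create_multi(
        ["user:1", "user:2", "user:3"],
        load_values,
        expiration_time=300,
    )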
@@ -1141,11 +1057,7 @@ class CacheRegion:

        orig_to_mangled = dict(zip(sorted_unique_keys, mangled_keys))

        values = dict(
            zip(mangled_keys, self._get_multi_from_backend(mangled_keys))
        )

        mutexes: Mapping[KeyType, Any] = {}
        values = dict(zip(mangled_keys, self.backend.get_multi(mangled_keys)))

        for orig_key, mangled_key in orig_to_mangled.items():
            with Lock(

@@ -1153,9 +1065,7 @@ class CacheRegion:
                gen_value,
                lambda: get_value(mangled_key),
                expiration_time,
                async_creator=lambda mutex: async_creator(
                    mutexes, orig_key, mutex
                ),
                async_creator=lambda mutex: async_creator(orig_key, mutex),
            ):
                pass
        try:

@@ -1167,194 +1077,59 @@ class CacheRegion:
            with self._log_time(keys_to_get):
                new_values = creator(*keys_to_get)

            values_w_created = {
                orig_to_mangled[k]: self._value(v)
            values_w_created = dict(
                (orig_to_mangled[k], self._value(v))
                for k, v in zip(keys_to_get, new_values)
            }

            if (
                expiration_time is None
                and self.region_invalidator.was_soft_invalidated()
            ):
                raise exception.DogpileCacheException(
                    "Non-None expiration time required "
                    "for soft invalidation"
                )
            )

            if not should_cache_fn:
                self._set_multi_cached_value_to_backend(values_w_created)

                self.backend.set_multi(values_w_created)
            else:
                self._set_multi_cached_value_to_backend(
                    {
                        k: v
                        for k, v in values_w_created.items()
                        if should_cache_fn(v.payload)
                    }
                values_to_cache = dict(
                    (k, v)
                    for k, v in values_w_created.items()
                    if should_cache_fn(v[0])
                )

                if values_to_cache:
                    self.backend.set_multi(values_to_cache)

            values.update(values_w_created)
            return [values[orig_to_mangled[k]].payload for k in keys]
        finally:
            for mutex in mutexes.values():
                mutex.release()

    def _value(
        self, value: Any, metadata: Optional[MetaDataType] = None
    ) -> CachedValue:
    def _value(self, value):
        """Return a :class:`.CachedValue` given a value."""
        return CachedValue(value, {"ct": time.time(), "v": value_version})

        if metadata is None:
            metadata = self._gen_metadata()
        return CachedValue(value, metadata)

    def _parse_serialized_from_backend(
        self, value: SerializedReturnType
    ) -> CacheReturnType:
        if value in (None, NO_VALUE):
            return NO_VALUE

        assert self.deserializer
        byte_value = cast(bytes, value)

        bytes_metadata, _, bytes_payload = byte_value.partition(b"|")
        metadata = json.loads(bytes_metadata)
        payload = self.deserializer(bytes_payload)
        return CachedValue(payload, metadata)

    def _serialize_cached_value_elements(
        self, payload: ValuePayload, metadata: MetaDataType
    ) -> bytes:
        serializer = cast(Serializer, self.serializer)

        return b"%b|%b" % (
            json.dumps(metadata).encode("ascii"),
            serializer(payload),
        )

    def _serialized_payload(
        self, payload: ValuePayload, metadata: Optional[MetaDataType] = None
    ) -> BackendFormatted:
        """Return a backend formatted representation of a value.

        If a serializer is in use then this will return a string representation
        with the value formatted by the serializer.

        """
        if metadata is None:
            metadata = self._gen_metadata()

        return self._serialize_cached_value_elements(payload, metadata)

    def _serialized_cached_value(self, value: CachedValue) -> BackendFormatted:
        """Return a backend formatted representation of a :class:`.CachedValue`.

        If a serializer is in use then this will return a string representation
        with the value formatted by the serializer.

        """

        assert self.serializer
        return self._serialize_cached_value_elements(
            value.payload, value.metadata
        )

    def _get_from_backend(self, key: KeyType) -> CacheReturnType:
        if self.deserializer:
            return self._parse_serialized_from_backend(
                self.backend.get_serialized(key)
            )
        else:
            return cast(CacheReturnType, self.backend.get(key))

    def _get_multi_from_backend(
        self, keys: Sequence[KeyType]
    ) -> Sequence[CacheReturnType]:
        if self.deserializer:
            return [
                self._parse_serialized_from_backend(v)
                for v in self.backend.get_serialized_multi(keys)
            ]
        else:
            return cast(
                Sequence[CacheReturnType], self.backend.get_multi(keys)
            )

    def _set_cached_value_to_backend(
        self, key: KeyType, value: CachedValue
    ) -> None:
        if self.serializer:
            self.backend.set_serialized(
                key, self._serialized_cached_value(value)
            )
        else:
            self.backend.set(key, value)

    def _set_multi_cached_value_to_backend(
        self, mapping: Mapping[KeyType, CachedValue]
    ) -> None:
        if not mapping:
            return

        if self.serializer:
            self.backend.set_serialized_multi(
                {
                    k: self._serialized_cached_value(v)
                    for k, v in mapping.items()
                }
            )
        else:
            self.backend.set_multi(mapping)

    def _gen_metadata(self) -> MetaDataType:
        return {"ct": time.time(), "v": value_version}

    def set(self, key: KeyType, value: ValuePayload) -> None:
    def set(self, key, value):
        """Place a new value in the cache under the given key."""

        if self.key_mangler:
            key = self.key_mangler(key)
        self.backend.set(key, self._value(value))

        if self.serializer:
            self.backend.set_serialized(key, self._serialized_payload(value))
        else:
            self.backend.set(key, self._value(value))

    def set_multi(self, mapping):
        """Place new values in the cache under the given keys.

    def set_multi(self, mapping: Mapping[KeyType, ValuePayload]) -> None:
        """Place new values in the cache under the given keys."""
        .. versionadded:: 0.5.0

        """
        if not mapping:
            return

        metadata = self._gen_metadata()

        if self.serializer:
            if self.key_mangler:
                mapping = {
                    self.key_mangler(k): self._serialized_payload(
                        v, metadata=metadata
                    )
                    for k, v in mapping.items()
                }
            else:
                mapping = {
                    k: self._serialized_payload(v, metadata=metadata)
                    for k, v in mapping.items()
                }
            self.backend.set_serialized_multi(mapping)
        if self.key_mangler:
            mapping = dict(
                (self.key_mangler(k), self._value(v))
                for k, v in mapping.items()
            )
        else:
            if self.key_mangler:
                mapping = {
                    self.key_mangler(k): self._value(v, metadata=metadata)
                    for k, v in mapping.items()
                }
            else:
                mapping = {
                    k: self._value(v, metadata=metadata)
                    for k, v in mapping.items()
                }
            self.backend.set_multi(mapping)
            mapping = dict((k, self._value(v)) for k, v in mapping.items())
        self.backend.set_multi(mapping)

    def delete(self, key: KeyType) -> None:
    def delete(self, key):
        """Remove a value from the cache.

        This operation is idempotent (can be called multiple times, or on a
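The serialized path above frames each entry as ASCII JSON metadata, a
``b"|"`` separator, and the serializer's payload bytes; ``partition()``
splits on the first ``|`` only, so pipes inside the payload are harmless.
A standalone sketch of the same framing::

    import json
    import pickle
    import time

    metadata = {"ct": time.time(), "v": 1}  # shape of _gen_metadata()
    raw = b"%b|%b" % (
        json.dumps(metadata).encode("ascii"),
        pickle.dumps({"answer": 42}),
    )

    # reading it back, as _parse_serialized_from_backend does
    bytes_metadata, _, bytes_payload = raw.partition(b"|")
    assert json.loads(bytes_metadata) == metadata
    assert pickle.loads(bytes_payload) == {"answer": 42}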
@@ -1366,7 +1141,7 @@ class CacheRegion:

        self.backend.delete(key)

    def delete_multi(self, keys: Sequence[KeyType]) -> None:
    def delete_multi(self, keys):
        """Remove multiple values from the cache.

        This operation is idempotent (can be called multiple times, or on a

@@ -1377,19 +1152,18 @@ class CacheRegion:
        """

        if self.key_mangler:
            km = self.key_mangler
            keys = [km(key) for key in keys]
            keys = list(map(lambda key: self.key_mangler(key), keys))

        self.backend.delete_multi(keys)

    def cache_on_arguments(
        self,
        namespace: Optional[str] = None,
        expiration_time: Union[float, ExpirationTimeCallable, None] = None,
        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
        to_str: Callable[[Any], str] = str,
        function_key_generator: Optional[FunctionKeyGenerator] = None,
    ) -> Callable[[Callable[..., ValuePayload]], Callable[..., ValuePayload]]:
        namespace=None,
        expiration_time=None,
        should_cache_fn=None,
        to_str=compat.string_type,
        function_key_generator=None,
    ):
        """A function decorator that will cache the return
        value of the function using a key derived from the
        function itself and its arguments.

@@ -1482,7 +1256,7 @@ class CacheRegion:
        (with caveats) for use with instance or class methods.
        Given the example::

            class MyClass:
            class MyClass(object):
                @region.cache_on_arguments(namespace="foo")
                def one(self, a, b):
                    return a + b

@@ -1496,12 +1270,12 @@ class CacheRegion:
        name within the same module, as can occur when decorating
        instance or class methods as below::

            class MyClass:
            class MyClass(object):
                @region.cache_on_arguments(namespace='MC')
                def somemethod(self, x, y):
                    ""

            class MyOtherClass:
            class MyOtherClass(object):
                @region.cache_on_arguments(namespace='MOC')
                def somemethod(self, x, y):
                    ""
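In use, the decorated function also gains helpers for cache management; a
sketch assuming a configured ``region`` and a hypothetical loader::

    @region.cache_on_arguments(namespace="users")
    def get_user(user_id):
        return load_user(user_id)

    get_user(1)             # computed, then cached
    get_user(1)             # served from the cache
    get_user.invalidate(1)  # drop the entry for these arguments
    get_user.refresh(1)     # recompute and re-store unconditionally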
@@ -1539,8 +1313,14 @@ class CacheRegion:
        end of the day, week or time period" and "cache until a certain date
        or time passes".

        .. versionchanged:: 0.5.0
           ``expiration_time`` may be passed as a callable to
           :meth:`.CacheRegion.cache_on_arguments`.

        :param should_cache_fn: passed to :meth:`.CacheRegion.get_or_create`.

        .. versionadded:: 0.4.3

        :param to_str: callable, will be called on each function argument
          in order to convert to a string.  Defaults to ``str()``.  If the
          function accepts non-ascii unicode arguments on Python 2.x, the

@@ -1548,10 +1328,14 @@ class CacheRegion:
          produce unicode cache keys which may require key mangling before
          reaching the cache.

        .. versionadded:: 0.5.0

        :param function_key_generator: a function that will produce a
          "cache key". This function will supersede the one configured on the
          :class:`.CacheRegion` itself.

        .. versionadded:: 0.5.5

        .. seealso::

            :meth:`.CacheRegion.cache_multi_on_arguments`

@@ -1559,34 +1343,30 @@ class CacheRegion:
            :meth:`.CacheRegion.get_or_create`

        """
        expiration_time_is_callable = callable(expiration_time)
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            _function_key_generator = self.function_key_generator
        else:
            _function_key_generator = function_key_generator
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
            key = key_generator(*arg, **kw)

            timeout: Optional[float] = (
                cast(ExpirationTimeCallable, expiration_time)()
            timeout = (
                expiration_time()
                if expiration_time_is_callable
                else cast(Optional[float], expiration_time)
                else expiration_time
            )
            return self.get_or_create(
                key, user_func, timeout, should_cache_fn, (arg, kw)
            )

        def cache_decorator(user_func):
            if to_str is cast(Callable[[Any], str], str):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = _function_key_generator(
                    namespace, user_func
                )  # type: ignore
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = _function_key_generator(
                    namespace, user_func, to_str
                key_generator = function_key_generator(
                    namespace, user_func, to_str=to_str
                )

            def refresh(*arg, **kw):
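A custom key generator follows the same shape as the stock ones in
``dogpile/cache/util.py`` (shown further below): it receives the
namespace and the function, and returns a callable that maps positional
arguments to a key.  A rough sketch::

    def my_key_generator(namespace, fn, to_str=str):
        prefix = "myapp:%s:%s" % (fn.__name__, namespace or "")

        def generate_key(*args):
            return prefix + "|" + " ".join(to_str(a) for a in args)

        return generate_key

    @region.cache_on_arguments(function_key_generator=my_key_generator)
    def lookup(ident):
        ...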
@@ -1626,20 +1406,13 @@ class CacheRegion:

    def cache_multi_on_arguments(
        self,
        namespace: Optional[str] = None,
        expiration_time: Union[float, ExpirationTimeCallable, None] = None,
        should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
        asdict: bool = False,
        to_str: ToStr = str,
        function_multi_key_generator: Optional[
            FunctionMultiKeyGenerator
        ] = None,
    ) -> Callable[
        [Callable[..., Sequence[ValuePayload]]],
        Callable[
            ..., Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]
        ],
    ]:
        namespace=None,
        expiration_time=None,
        should_cache_fn=None,
        asdict=False,
        to_str=compat.string_type,
        function_multi_key_generator=None,
    ):
        """A function decorator that will cache multiple return
        values from the function using a sequence of keys derived from the
        function itself and the arguments passed to it.

@@ -1756,19 +1529,12 @@ class CacheRegion:
            :meth:`.CacheRegion.get_or_create_multi`

        """
        expiration_time_is_callable = callable(expiration_time)
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_multi_key_generator is None:
            _function_multi_key_generator = self.function_multi_key_generator
        else:
            _function_multi_key_generator = function_multi_key_generator
            function_multi_key_generator = self.function_multi_key_generator

        def get_or_create_for_user_func(
            key_generator: Callable[..., Sequence[KeyType]],
            user_func: Callable[..., Sequence[ValuePayload]],
            *arg: Any,
            **kw: Any,
        ) -> Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]:
        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
            cache_keys = arg
            keys = key_generator(*arg, **kw)
            key_lookup = dict(zip(keys, cache_keys))
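Typical use of the multi-key decorator: one cache key is derived per
positional argument, and only the missing ones reach the function body
(``fetch_user()`` is hypothetical)::

    @region.cache_multi_on_arguments(namespace="users", expiration_time=300)
    def load_users(*user_ids):
        # cached per-argument; misses are generated in one call
        return [fetch_user(uid) for uid in user_ids]

    load_users(1, 2, 3)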
@@ -1777,16 +1543,12 @@ class CacheRegion:

            def creator(*keys_to_create):
                return user_func(*[key_lookup[k] for k in keys_to_create])

            timeout: Optional[float] = (
                cast(ExpirationTimeCallable, expiration_time)()
            timeout = (
                expiration_time()
                if expiration_time_is_callable
                else cast(Optional[float], expiration_time)
                else expiration_time
            )

            result: Union[
                Sequence[ValuePayload], Mapping[KeyType, ValuePayload]
            ]

            if asdict:

                def dict_create(*keys):

@@ -1819,7 +1581,7 @@ class CacheRegion:
            return result

        def cache_decorator(user_func):
            key_generator = _function_multi_key_generator(
            key_generator = function_multi_key_generator(
                namespace, user_func, to_str=to_str
            )


@@ -1865,7 +1627,7 @@ class CacheRegion:
        return cache_decorator


def make_region(*arg: Any, **kw: Any) -> CacheRegion:
def make_region(*arg, **kw):
    """Instantiate a new :class:`.CacheRegion`.

    Currently, :func:`.make_region` is a passthrough
10 libs/common/dogpile/cache/util.py vendored
@@ -4,7 +4,7 @@ from ..util import compat
from ..util import langhelpers


def function_key_generator(namespace, fn, to_str=str):
def function_key_generator(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.

@@ -45,7 +45,7 @@ def function_key_generator(namespace, fn, to_str=str):
    return generate_key


def function_multi_key_generator(namespace, fn, to_str=str):
def function_multi_key_generator(namespace, fn, to_str=compat.string_type):

    if namespace is None:
        namespace = "%s:%s" % (fn.__module__, fn.__name__)

@@ -68,7 +68,7 @@ def function_multi_key_generator(namespace, fn, to_str=str):
    return generate_keys


def kwarg_function_key_generator(namespace, fn, to_str=str):
def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
    """Return a function that generates a string
    key, based on a given function as well as
    arguments to the returned function itself.

@@ -131,7 +131,7 @@ def kwarg_function_key_generator(namespace, fn, to_str=str):
def sha1_mangle_key(key):
    """a SHA1 key mangler."""

    if isinstance(key, str):
    if isinstance(key, compat.text_type):
        key = key.encode("utf-8")

    return sha1(key).hexdigest()

@@ -162,7 +162,7 @@ PluginLoader = langhelpers.PluginLoader
to_list = langhelpers.to_list


class repr_obj:
class repr_obj(object):

    __slots__ = ("value", "max_chars")
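The SHA1 mangler keeps backend keys fixed-length and ASCII-safe; it is
typically installed region-wide::

    from dogpile.cache import make_region
    from dogpile.cache.util import sha1_mangle_key

    # every key is hashed before it reaches the backend
    region = make_region(key_mangler=sha1_mangle_key).configure(
        "dogpile.cache.memory"
    )

    region.set("some key with spaces", "value")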