Updated stevedore to 2.0.1

Labrys of Knossos 2022-11-29 01:47:46 -05:00
parent f1624a586f
commit fb6011f88d
52 changed files with 581 additions and 1960 deletions

15 binary files not shown.

View file

@@ -1,4 +1,4 @@
__version__ = "1.1.8"
__version__ = '0.9.2'
from .lock import Lock # noqa
from .lock import NeedRegenerationException # noqa

View file

@@ -1,19 +1,13 @@
import abc
import pickle
from typing import Any
from typing import Callable
from typing import cast
from typing import Mapping
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Union
import operator
from ..util.compat import py3k
class NoValue:
class NoValue(object):
"""Describe a missing cache value.
The :data:`.NO_VALUE` constant should be used.
The :attr:`.NO_VALUE` module global
should be used.
"""
@@ -28,123 +22,51 @@ class NoValue:
"""
return "<dogpile.cache.api.NoValue object>"
def __bool__(self): # pragma NO COVERAGE
return False
if py3k:
def __bool__(self): # pragma NO COVERAGE
return False
else:
def __nonzero__(self): # pragma NO COVERAGE
return False
NO_VALUE = NoValue()
"""Value returned from ``get()`` that describes
a key not present."""
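A minimal sketch of how the sentinel is typically consumed (region and key here are illustrative); the miss check compares by identity::

    from dogpile.cache import make_region
    from dogpile.cache.api import NO_VALUE

    region = make_region().configure('dogpile.cache.memory')

    # get() returns the NO_VALUE sentinel on a miss rather than None
    if region.get("some_key") is NO_VALUE:
        print("cache miss")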
MetaDataType = Mapping[str, Any]
KeyType = str
"""A cache key."""
ValuePayload = Any
"""An object to be placed in the cache against a key."""
KeyManglerType = Callable[[KeyType], KeyType]
Serializer = Callable[[ValuePayload], bytes]
Deserializer = Callable[[bytes], ValuePayload]
class CacheMutex(abc.ABC):
"""Describes a mutexing object with acquire and release methods.
This is an abstract base class; any object that has acquire/release
methods may be used.
.. versionadded:: 1.1
.. seealso::
:meth:`.CacheBackend.get_mutex` - the backend method that optionally
returns this locking object.
"""
@abc.abstractmethod
def acquire(self, wait: bool = True) -> bool:
"""Acquire the mutex.
:param wait: if True, block until available, else return True/False
immediately.
:return: True if the lock succeeded.
"""
raise NotImplementedError()
@abc.abstractmethod
def release(self) -> None:
"""Release the mutex."""
raise NotImplementedError()
@abc.abstractmethod
def locked(self) -> bool:
"""Check if the mutex was acquired.
:return: True if the lock is acquired.
.. versionadded:: 1.1.2
"""
raise NotImplementedError()
@classmethod
def __subclasshook__(cls, C):
return hasattr(C, "acquire") and hasattr(C, "release")
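Per the ``__subclasshook__`` above, any object with these three methods satisfies the interface; a minimal in-process sketch wrapping ``threading.Lock`` (illustrative, not part of this commit)::

    import threading

    class LocalMutex:
        # duck-types CacheMutex: acquire / release / locked
        def __init__(self):
            self._lock = threading.Lock()

        def acquire(self, wait=True):
            # block when wait=True, else attempt without blocking
            return self._lock.acquire(wait)

        def release(self):
            self._lock.release()

        def locked(self):
            return self._lock.locked()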
class CachedValue(NamedTuple):
class CachedValue(tuple):
"""Represent a value stored in the cache.
:class:`.CachedValue` is a two-tuple of
``(payload, metadata)``, where ``metadata``
is dogpile.cache's tracking information (
currently the creation time).
currently the creation time). The metadata
and tuple structure is pickleable, if
the backend requires serialization.
"""
payload: ValuePayload
payload = property(operator.itemgetter(0))
"""Named accessor for the payload."""
metadata: MetaDataType
metadata = property(operator.itemgetter(1))
"""Named accessor for the dogpile.cache metadata dictionary."""
def __new__(cls, payload, metadata):
return tuple.__new__(cls, (payload, metadata))
def __reduce__(self):
return CachedValue, (self.payload, self.metadata)
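Either form behaves as a plain two-tuple with named accessors and survives a pickle round trip; a quick illustration (the metadata keys mirror the creation-time/version metadata the region stores)::

    import pickle
    import time

    from dogpile.cache.api import CachedValue

    cv = CachedValue("payload", {"ct": time.time(), "v": 1})
    assert cv.payload == cv[0]
    assert cv.metadata == cv[1]
    assert pickle.loads(pickle.dumps(cv)) == cv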
CacheReturnType = Union[CachedValue, NoValue]
"""The non-serialized form of what may be returned from a backend
get method.
class CacheBackend(object):
"""Base class for backend implementations."""
"""
SerializedReturnType = Union[bytes, NoValue]
"""the serialized form of what may be returned from a backend get method."""
BackendFormatted = Union[CacheReturnType, SerializedReturnType]
"""Describes the type returned from the :meth:`.CacheBackend.get` method."""
BackendSetType = Union[CachedValue, bytes]
"""Describes the value argument passed to the :meth:`.CacheBackend.set`
method."""
BackendArguments = Mapping[str, Any]
class CacheBackend:
"""Base class for backend implementations.
Backends which set and get Python object values should subclass this
backend. For backends in which the value that's stored is ultimately
a stream of bytes, the :class:`.BytesBackend` should be used.
"""
key_mangler: Optional[Callable[[KeyType], KeyType]] = None
key_mangler = None
"""Key mangling function.
May be None, or otherwise declared
@@ -152,23 +74,7 @@ class CacheBackend:
"""
serializer: Union[None, Serializer] = None
"""Serializer function that will be used by default if not overridden
by the region.
.. versionadded:: 1.1
"""
deserializer: Union[None, Deserializer] = None
"""deserializer function that will be used by default if not overridden
by the region.
.. versionadded:: 1.1
"""
def __init__(self, arguments: BackendArguments): # pragma NO COVERAGE
def __init__(self, arguments): # pragma NO COVERAGE
"""Construct a new :class:`.CacheBackend`.
Subclasses should override this to
@@ -191,10 +97,10 @@ class CacheBackend:
)
)
def has_lock_timeout(self) -> bool:
def has_lock_timeout(self):
return False
def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
def get_mutex(self, key):
"""Return an optional mutexing object for the given key.
This object need only provide an ``acquire()``
@@ -227,141 +133,48 @@ class CacheBackend:
"""
return None
def get(self, key: KeyType) -> BackendFormatted: # pragma NO COVERAGE
"""Retrieve an optionally serialized value from the cache.
def get(self, key): # pragma NO COVERAGE
"""Retrieve a value from the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.get`
method, which will also be processed by the "key mangling" function
if one was present.
:return: the Python object that corresponds to
what was established via the :meth:`.CacheBackend.set` method,
or the :data:`.NO_VALUE` constant if not present.
If a serializer is in use, this method will only be called if the
:meth:`.CacheBackend.get_serialized` method is not overridden.
The returned value should be an instance of
:class:`.CachedValue`, or ``NO_VALUE`` if
not present.
"""
raise NotImplementedError()
def get_multi(
self, keys: Sequence[KeyType]
) -> Sequence[BackendFormatted]: # pragma NO COVERAGE
"""Retrieve multiple optionally serialized values from the cache.
def get_multi(self, keys): # pragma NO COVERAGE
"""Retrieve multiple values from the cache.
:param keys: sequence of string keys that was passed to the
:meth:`.CacheRegion.get_multi` method, which will also be processed
by the "key mangling" function if one was present.
:return: a list of values as would be returned
individually via the :meth:`.CacheBackend.get` method, corresponding
to the list of keys given.
If a serializer is in use, this method will only be called if the
:meth:`.CacheBackend.get_serialized_multi` method is not overridden.
The returned value should be a list, corresponding
to the list of keys given.
.. versionadded:: 0.5.0
"""
raise NotImplementedError()
def get_serialized(self, key: KeyType) -> SerializedReturnType:
"""Retrieve a serialized value from the cache.
def set(self, key, value): # pragma NO COVERAGE
"""Set a value in the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.get`
method, which will also be processed by the "key mangling" function
if one was present.
:return: a bytes object, or :data:`.NO_VALUE`
constant if not present.
The default implementation of this method for :class:`.CacheBackend`
returns the value of the :meth:`.CacheBackend.get` method.
.. versionadded:: 1.1
.. seealso::
:class:`.BytesBackend`
"""
return cast(SerializedReturnType, self.get(key))
def get_serialized_multi(
self, keys: Sequence[KeyType]
) -> Sequence[SerializedReturnType]: # pragma NO COVERAGE
"""Retrieve multiple serialized values from the cache.
:param keys: sequence of string keys that was passed to the
:meth:`.CacheRegion.get_multi` method, which will also be processed
by the "key mangling" function if one was present.
:return: list of bytes objects
The default implementation of this method for :class:`.CacheBackend`
returns the value of the :meth:`.CacheBackend.get_multi` method.
.. versionadded:: 1.1
.. seealso::
:class:`.BytesBackend`
"""
return cast(Sequence[SerializedReturnType], self.get_multi(keys))
def set(
self, key: KeyType, value: BackendSetType
) -> None: # pragma NO COVERAGE
"""Set an optionally serialized value in the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.set`
method, which will also be processed by the "key mangling" function
if one was present.
:param value: The optionally serialized :class:`.CachedValue` object.
May be an instance of :class:`.CachedValue` or a bytes object
depending on if a serializer is in use with the region and if the
:meth:`.CacheBackend.set_serialized` method is not overridden.
.. seealso::
:meth:`.CacheBackend.set_serialized`
The key will be whatever was passed
to the registry, processed by the
"key mangling" function, if any.
The value will always be an instance
of :class:`.CachedValue`.
"""
raise NotImplementedError()
def set_serialized(
self, key: KeyType, value: bytes
) -> None: # pragma NO COVERAGE
"""Set a serialized value in the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.set`
method, which will also be processed by the "key mangling" function
if one was present.
:param value: a bytes object to be stored.
The default implementation of this method for :class:`.CacheBackend`
calls upon the :meth:`.CacheBackend.set` method.
.. versionadded:: 1.1
.. seealso::
:class:`.BytesBackend`
"""
self.set(key, value)
def set_multi(
self, mapping: Mapping[KeyType, BackendSetType]
) -> None: # pragma NO COVERAGE
def set_multi(self, mapping): # pragma NO COVERAGE
"""Set multiple values in the cache.
:param mapping: a dict in which the key will be whatever was passed to
the :meth:`.CacheRegion.set_multi` method, processed by the "key
mangling" function, if any.
``mapping`` is a dict in which
the key will be whatever was passed
to the registry, processed by the
"key mangling" function, if any.
The value will always be an instance
of :class:`.CachedValue`.
When implementing a new :class:`.CacheBackend` or customizing via
:class:`.ProxyBackend`, be aware that when this method is invoked by
@@ -371,52 +184,17 @@ class CacheBackend:
-- that will have the undesirable effect of modifying the returned
values as well.
If a serializer is in use, this method will only be called if the
:meth:`.CacheBackend.set_serialized_multi` method is not overridden.
.. versionadded:: 0.5.0
"""
raise NotImplementedError()
def set_serialized_multi(
self, mapping: Mapping[KeyType, bytes]
) -> None: # pragma NO COVERAGE
"""Set multiple serialized values in the cache.
:param mapping: a dict in which the key will be whatever was passed to
the :meth:`.CacheRegion.set_multi` method, processed by the "key
mangling" function, if any.
When implementing a new :class:`.CacheBackend` or customizing via
:class:`.ProxyBackend`, be aware that when this method is invoked by
:meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
same ones returned to the upstream caller. If the subclass alters the
values in any way, it must not do so 'in-place' on the ``mapping`` dict
-- that will have the undesirable effect of modifying the returned
values as well.
.. versionadded:: 1.1
The default implementation of this method for :class:`.CacheBackend`
calls upon the :meth:`.CacheBackend.set_multi` method.
.. seealso::
:class:`.BytesBackend`
"""
self.set_multi(mapping)
def delete(self, key: KeyType) -> None: # pragma NO COVERAGE
def delete(self, key): # pragma NO COVERAGE
"""Delete a value from the cache.
:param key: String key that was passed to the
:meth:`.CacheRegion.delete`
method, which will also be processed by the "key mangling" function
if one was present.
The key will be whatever was passed
to the registry, processed by the
"key mangling" function, if any.
The behavior here should be idempotent,
that is, can be called any number of times
@@ -425,14 +203,12 @@ class CacheBackend:
"""
raise NotImplementedError()
def delete_multi(
self, keys: Sequence[KeyType]
) -> None: # pragma NO COVERAGE
def delete_multi(self, keys): # pragma NO COVERAGE
"""Delete multiple values from the cache.
:param keys: sequence of string keys that was passed to the
:meth:`.CacheRegion.delete_multi` method, which will also be processed
by the "key mangling" function if one was present.
The key will be whatever was passed
to the registry, processed by the
"key mangling" function, if any.
The behavior here should be idempotent,
that is, can be called any number of times
@@ -443,95 +219,3 @@ class CacheBackend:
"""
raise NotImplementedError()
class DefaultSerialization:
serializer: Union[None, Serializer] = staticmethod( # type: ignore
pickle.dumps
)
deserializer: Union[None, Deserializer] = staticmethod( # type: ignore
pickle.loads
)
class BytesBackend(DefaultSerialization, CacheBackend):
"""A cache backend that receives and returns series of bytes.
This backend only supports the "serialized" form of values; subclasses
should implement :meth:`.BytesBackend.get_serialized`,
:meth:`.BytesBackend.get_serialized_multi`,
:meth:`.BytesBackend.set_serialized`,
:meth:`.BytesBackend.set_serialized_multi`.
.. versionadded:: 1.1
"""
def get_serialized(self, key: KeyType) -> SerializedReturnType:
"""Retrieve a serialized value from the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.get`
method, which will also be processed by the "key mangling" function
if one was present.
:return: a bytes object, or :data:`.NO_VALUE`
constant if not present.
.. versionadded:: 1.1
"""
raise NotImplementedError()
def get_serialized_multi(
self, keys: Sequence[KeyType]
) -> Sequence[SerializedReturnType]: # pragma NO COVERAGE
"""Retrieve multiple serialized values from the cache.
:param keys: sequence of string keys that was passed to the
:meth:`.CacheRegion.get_multi` method, which will also be processed
by the "key mangling" function if one was present.
:return: list of bytes objects
.. versionadded:: 1.1
"""
raise NotImplementedError()
def set_serialized(
self, key: KeyType, value: bytes
) -> None: # pragma NO COVERAGE
"""Set a serialized value in the cache.
:param key: String key that was passed to the :meth:`.CacheRegion.set`
method, which will also be processed by the "key mangling" function
if one was present.
:param value: a bytes object to be stored.
.. versionadded:: 1.1
"""
raise NotImplementedError()
def set_serialized_multi(
self, mapping: Mapping[KeyType, bytes]
) -> None: # pragma NO COVERAGE
"""Set multiple serialized values in the cache.
:param mapping: a dict in which the key will be whatever was passed to
the :meth:`.CacheRegion.set_multi` method, processed by the "key
mangling" function, if any.
When implementing a new :class:`.CacheBackend` or customizing via
:class:`.ProxyBackend`, be aware that when this method is invoked by
:meth:`.Region.get_or_create_multi`, the ``mapping`` values are the
same ones returned to the upstream caller. If the subclass alters the
values in any way, it must not do so 'in-place' on the ``mapping`` dict
-- that will have the undesirable effect of modifying the returned
values as well.
.. versionadded:: 1.1
"""
raise NotImplementedError()
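A minimal in-memory subclass implementing the four serialized methods above; an illustrative sketch, not a backend shipped with dogpile.cache::

    from dogpile.cache.api import NO_VALUE, BytesBackend

    class DictBytesBackend(BytesBackend):
        # stores raw bytes in a plain dict; pickling is supplied by
        # the DefaultSerialization mixin / the region
        def __init__(self, arguments):
            self._store = {}

        def get_serialized(self, key):
            return self._store.get(key, NO_VALUE)

        def get_serialized_multi(self, keys):
            return [self._store.get(key, NO_VALUE) for key in keys]

        def set_serialized(self, key, value):
            self._store[key] = value

        def set_serialized_multi(self, mapping):
            self._store.update(mapping)

        def delete(self, key):
            self._store.pop(key, None)

        def delete_multi(self, keys):
            for key in keys:
                self._store.pop(key, None)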

View file

@@ -24,11 +24,6 @@ register_backend(
"dogpile.cache.backends.memcached",
"MemcachedBackend",
)
register_backend(
"dogpile.cache.pymemcache",
"dogpile.cache.backends.memcached",
"PyMemcacheBackend",
)
register_backend(
"dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend"
)
@@ -40,8 +35,3 @@ register_backend(
register_backend(
"dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend"
)
register_backend(
"dogpile.cache.redis_sentinel",
"dogpile.cache.backends.redis",
"RedisSentinelBackend",
)
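The same three-argument hook (region name, module path, class name) serves third-party backends; the module and class below are hypothetical::

    from dogpile.cache.backends import register_backend

    # "acme_cache.backend" / "AcmeBackend" are illustrative placeholders
    register_backend(
        "acme.cache", "acme_cache.backend", "AcmeBackend"
    )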

View file

@@ -9,18 +9,17 @@ Provides backends that deal with local filesystem access.
from __future__ import with_statement
from contextlib import contextmanager
import dbm
import os
import threading
from ..api import BytesBackend
from ..api import CacheBackend
from ..api import NO_VALUE
from ... import util
from ...util import compat
__all__ = ["DBMBackend", "FileLock", "AbstractFileLock"]
class DBMBackend(BytesBackend):
class DBMBackend(CacheBackend):
"""A file-backend using a dbm file to store keys.
Basic usage::
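    # sketch based on the dogpile.cache documentation; the original
    # literal block is elided by the following hunk
    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.dbm',
        expiration_time=3600,
        arguments={
            "filename": "/path/to/cachefile.dbm"
        }
    )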
@@ -157,6 +156,12 @@ class DBMBackend(BytesBackend):
util.KeyReentrantMutex.factory,
)
# TODO: make this configurable
if compat.py3k:
import dbm
else:
import anydbm as dbm
self.dbmmodule = dbm
self._init_dbm_file()
def _init_lock(self, argument, suffix, basedir, basefile, wrapper=None):
@@ -180,7 +185,7 @@ class DBMBackend(BytesBackend):
exists = True
break
if not exists:
fh = dbm.open(self.filename, "c")
fh = self.dbmmodule.open(self.filename, "c")
fh.close()
def get_mutex(self, key):
@@ -210,50 +215,57 @@ class DBMBackend(BytesBackend):
@contextmanager
def _dbm_file(self, write):
with self._use_rw_lock(write):
with dbm.open(self.filename, "w" if write else "r") as dbm_obj:
yield dbm_obj
dbm = self.dbmmodule.open(self.filename, "w" if write else "r")
yield dbm
dbm.close()
def get_serialized(self, key):
with self._dbm_file(False) as dbm_obj:
if hasattr(dbm_obj, "get"):
value = dbm_obj.get(key, NO_VALUE)
def get(self, key):
with self._dbm_file(False) as dbm:
if hasattr(dbm, "get"):
value = dbm.get(key, NO_VALUE)
else:
# gdbm objects lack a .get method
try:
value = dbm_obj[key]
value = dbm[key]
except KeyError:
value = NO_VALUE
if value is not NO_VALUE:
value = compat.pickle.loads(value)
return value
def get_serialized_multi(self, keys):
return [self.get_serialized(key) for key in keys]
def get_multi(self, keys):
return [self.get(key) for key in keys]
def set_serialized(self, key, value):
with self._dbm_file(True) as dbm_obj:
dbm_obj[key] = value
def set(self, key, value):
with self._dbm_file(True) as dbm:
dbm[key] = compat.pickle.dumps(
value, compat.pickle.HIGHEST_PROTOCOL
)
def set_serialized_multi(self, mapping):
with self._dbm_file(True) as dbm_obj:
def set_multi(self, mapping):
with self._dbm_file(True) as dbm:
for key, value in mapping.items():
dbm_obj[key] = value
dbm[key] = compat.pickle.dumps(
value, compat.pickle.HIGHEST_PROTOCOL
)
def delete(self, key):
with self._dbm_file(True) as dbm_obj:
with self._dbm_file(True) as dbm:
try:
del dbm_obj[key]
del dbm[key]
except KeyError:
pass
def delete_multi(self, keys):
with self._dbm_file(True) as dbm_obj:
with self._dbm_file(True) as dbm:
for key in keys:
try:
del dbm_obj[key]
del dbm[key]
except KeyError:
pass
class AbstractFileLock:
class AbstractFileLock(object):
"""Coordinate read/write access to a file.
Typically this is a file-based lock, but it doesn't necessarily have to be.
@@ -385,7 +397,7 @@ class FileLock(AbstractFileLock):
"""
def __init__(self, filename):
self._filedescriptor = threading.local()
self._filedescriptor = compat.threading.local()
self.filename = filename
@util.memoized_property

View file

@@ -7,42 +7,29 @@ Provides backends for talking to `memcached <http://memcached.org>`_.
"""
import random
import threading
import time
import typing
from typing import Any
from typing import Mapping
import warnings
from ..api import CacheBackend
from ..api import NO_VALUE
from ... import util
if typing.TYPE_CHECKING:
import bmemcached
import memcache
import pylibmc
import pymemcache
else:
# delayed import
bmemcached = None # noqa F811
memcache = None # noqa F811
pylibmc = None # noqa F811
pymemcache = None # noqa F811
from ...util import compat
__all__ = (
"GenericMemcachedBackend",
"MemcachedBackend",
"PylibmcBackend",
"PyMemcacheBackend",
"BMemcachedBackend",
"MemcachedLock",
)
class MemcachedLock(object):
"""Simple distributed lock using memcached."""
"""Simple distributed lock using memcached.
This is an adaptation of the lock featured at
http://amix.dk/blog/post/19386
"""
def __init__(self, client_fn, key, timeout=0):
self.client_fn = client_fn
@@ -58,15 +45,11 @@ class MemcachedLock(object):
elif not wait:
return False
else:
sleep_time = (((i + 1) * random.random()) + 2**i) / 2.5
sleep_time = (((i + 1) * random.random()) + 2 ** i) / 2.5
time.sleep(sleep_time)
if i < 15:
i += 1
def locked(self):
client = self.client_fn()
return client.get(self.key) is not None
def release(self):
client = self.client_fn()
client.delete(self.key)
@@ -124,17 +107,10 @@ class GenericMemcachedBackend(CacheBackend):
"""
set_arguments: Mapping[str, Any] = {}
set_arguments = {}
"""Additional arguments which will be passed
to the :meth:`set` method."""
# No need to override serializer, as all the memcached libraries
# handles that themselves. Still, we support customizing the
# serializer/deserializer to use better default pickle protocol
# or completely different serialization mechanism
serializer = None
deserializer = None
def __init__(self, arguments):
self._imports()
# using a plain threading.local here. threading.local
@@ -162,7 +138,7 @@ class GenericMemcachedBackend(CacheBackend):
def _clients(self):
backend = self
class ClientPool(threading.local):
class ClientPool(compat.threading.local):
def __init__(self):
self.memcached = backend._create_client()
@@ -197,17 +173,12 @@ class GenericMemcachedBackend(CacheBackend):
def get_multi(self, keys):
values = self.client.get_multi(keys)
return [
NO_VALUE if val is None else val
for val in [values.get(key, NO_VALUE) for key in keys]
]
return [NO_VALUE if key not in values else values[key] for key in keys]
def set(self, key, value):
self.client.set(key, value, **self.set_arguments)
def set_multi(self, mapping):
mapping = {key: value for key, value in mapping.items()}
self.client.set_multi(mapping, **self.set_arguments)
def delete(self, key):
@@ -217,9 +188,10 @@ class GenericMemcachedBackend(CacheBackend):
self.client.delete_multi(keys)
class MemcacheArgs(GenericMemcachedBackend):
class MemcacheArgs(object):
"""Mixin which provides support for the 'time' argument to set(),
'min_compress_len' to other methods.
"""
def __init__(self, arguments):
@@ -235,6 +207,9 @@ class MemcacheArgs(GenericMemcachedBackend):
super(MemcacheArgs, self).__init__(arguments)
pylibmc = None
class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
"""A backend for the
`pylibmc <http://sendapatch.se/projects/pylibmc/index.html>`_
@@ -284,6 +259,9 @@ class PylibmcBackend(MemcacheArgs, GenericMemcachedBackend):
)
memcache = None
class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
"""A backend using the standard
`Python-memcached <http://www.tummy.com/Community/software/\
@@ -302,39 +280,17 @@ class MemcachedBackend(MemcacheArgs, GenericMemcachedBackend):
}
)
:param dead_retry: Number of seconds memcached server is considered dead
before it is tried again. Will be passed to ``memcache.Client``
as the ``dead_retry`` parameter.
.. versionchanged:: 1.1.8 Moved the ``dead_retry`` argument which was
erroneously added to "set_parameters" to
be part of the Memcached connection arguments.
:param socket_timeout: Timeout in seconds for every call to a server.
Will be passed to ``memcache.Client`` as the ``socket_timeout``
parameter.
.. versionchanged:: 1.1.8 Moved the ``socket_timeout`` argument which
was erroneously added to "set_parameters"
to be part of the Memcached connection arguments.
"""
def __init__(self, arguments):
self.dead_retry = arguments.get("dead_retry", 30)
self.socket_timeout = arguments.get("socket_timeout", 3)
super(MemcachedBackend, self).__init__(arguments)
def _imports(self):
global memcache
import memcache # noqa
def _create_client(self):
return memcache.Client(
self.url,
dead_retry=self.dead_retry,
socket_timeout=self.socket_timeout,
)
return memcache.Client(self.url)
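A region configuration exercising the two connection parameters documented above might look like this sketch (values illustrative)::

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memcached',
        expiration_time=3600,
        arguments={
            'url': "127.0.0.1:11211",
            'dead_retry': 30,      # seconds a dead server is skipped
            'socket_timeout': 3,   # per-call socket timeout, in seconds
        }
    )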
bmemcached = None
class BMemcachedBackend(GenericMemcachedBackend):
@@ -343,11 +299,9 @@ class BMemcachedBackend(GenericMemcachedBackend):
python-binary-memcached>`_
memcached client.
This is a pure Python memcached client which includes
security features like SASL and SSL/TLS.
SASL is a standard for adding authentication mechanisms
to protocols in a way that is protocol independent.
This is a pure Python memcached client which
includes the ability to authenticate with a memcached
server using SASL.
A typical configuration using username/password::
@@ -363,25 +317,6 @@ class BMemcachedBackend(GenericMemcachedBackend):
}
)
A typical configuration using tls_context::
import ssl
from dogpile.cache import make_region
ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
region = make_region().configure(
'dogpile.cache.bmemcached',
expiration_time = 3600,
arguments = {
'url':["127.0.0.1"],
'tls_context':ctx,
}
)
For advanced ways to configure TLS creating a more complex
tls_context visit https://docs.python.org/3/library/ssl.html
Arguments which can be passed to the ``arguments``
dictionary include:
@@ -389,17 +324,12 @@ class BMemcachedBackend(GenericMemcachedBackend):
SASL authentication.
:param password: optional password, will be used for
SASL authentication.
:param tls_context: optional TLS context, will be used for
TLS connections.
.. versionadded:: 1.0.2
"""
def __init__(self, arguments):
self.username = arguments.get("username", None)
self.password = arguments.get("password", None)
self.tls_context = arguments.get("tls_context", None)
super(BMemcachedBackend, self).__init__(arguments)
def _imports(self):
@@ -425,215 +355,10 @@ class BMemcachedBackend(GenericMemcachedBackend):
def _create_client(self):
return self.Client(
self.url,
username=self.username,
password=self.password,
tls_context=self.tls_context,
self.url, username=self.username, password=self.password
)
def delete_multi(self, keys):
"""python-binary-memcached api does not implements delete_multi"""
for key in keys:
self.delete(key)
class PyMemcacheBackend(GenericMemcachedBackend):
"""A backend for the
`pymemcache <https://github.com/pinterest/pymemcache>`_
memcached client.
A comprehensive, fast, pure Python memcached client.
.. versionadded:: 1.1.2
pymemcache supports the following features:
* Complete implementation of the memcached text protocol.
* Configurable timeouts for socket connect and send/recv calls.
* Access to the "noreply" flag, which can significantly increase
the speed of writes.
* Flexible, simple approach to serialization and deserialization.
* The (optional) ability to treat network and memcached errors as
cache misses.
dogpile.cache uses the ``HashClient`` from pymemcache in order to reduce
API differences when compared to other memcached client drivers.
This allows the user to provide a single server or a list of memcached
servers.
Arguments which can be passed to the ``arguments``
dictionary include:
:param tls_context: optional TLS context, will be used for
TLS connections.
A typical configuration using tls_context::
import ssl
from dogpile.cache import make_region
ctx = ssl.create_default_context(cafile="/path/to/my-ca.pem")
region = make_region().configure(
'dogpile.cache.pymemcache',
expiration_time = 3600,
arguments = {
'url':["127.0.0.1"],
'tls_context':ctx,
}
)
.. seealso::
`<https://docs.python.org/3/library/ssl.html>`_ - additional TLS
documentation.
:param serde: optional "serde". Defaults to
``pymemcache.serde.pickle_serde``.
:param default_noreply: defaults to False. When set to True this flag
enables the pymemcache "noreply" feature. See the pymemcache
documentation for further details.
:param socket_keepalive: optional socket keepalive, will be used for
TCP keepalive configuration. Use of this parameter requires pymemcache
3.5.0 or greater. This parameter
accepts a
`pymemcache.client.base.KeepAliveOpts
<https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html#pymemcache.client.base.KeepaliveOpts>`_
object.
A typical configuration using ``socket_keepalive``::
from pymemcache import KeepaliveOpts
from dogpile.cache import make_region
# Using the default keepalive configuration
socket_keepalive = KeepaliveOpts()
region = make_region().configure(
'dogpile.cache.pymemcache',
expiration_time = 3600,
arguments = {
'url':["127.0.0.1"],
'socket_keepalive': socket_keepalive
}
)
.. versionadded:: 1.1.4 - added support for ``socket_keepalive``.
:param enable_retry_client: optional flag to enable retry client
mechanisms to handle failure. Defaults to False. When set to ``True``,
the :paramref:`.PyMemcacheBackend.retry_attempts` parameter must also
be set, along with optional parameters
:paramref:`.PyMemcacheBackend.retry_delay`.
:paramref:`.PyMemcacheBackend.retry_for`,
:paramref:`.PyMemcacheBackend.do_not_retry_for`.
.. seealso::
`<https://pymemcache.readthedocs.io/en/latest/getting_started.html#using-the-built-in-retrying-mechanism>`_ -
in the pymemcache documentation
.. versionadded:: 1.1.4
:param retry_attempts: how many times to attempt an action with
pymemcache's retrying wrapper before failing. Must be 1 or above.
Defaults to None.
.. versionadded:: 1.1.4
:param retry_delay: optional int|float, how many seconds to sleep between
each attempt. Used by the retry wrapper. Defaults to None.
.. versionadded:: 1.1.4
:param retry_for: optional None|tuple|set|list, what exceptions to
allow retries for. Will allow retries for all exceptions if None.
Example: ``(MemcacheClientError, MemcacheUnexpectedCloseError)``
Accepts any class that is a subclass of Exception. Defaults to None.
.. versionadded:: 1.1.4
:param do_not_retry_for: optional None|tuple|set|list, what
exceptions should not be retried. Will not block retries for any Exception if
None. Example: ``(IOError, MemcacheIllegalInputError)``
Accepts any class that is a subclass of Exception. Defaults to None.
.. versionadded:: 1.1.4
:param hashclient_retry_attempts: Amount of times a client should be tried
before it is marked dead and removed from the pool in the HashClient's
internal mechanisms.
.. versionadded:: 1.1.5
:param hashclient_retry_timeout: Time in seconds that should pass between
retry attempts in the HashClient's internal mechanisms.
.. versionadded:: 1.1.5
:param dead_timeout: Time in seconds before attempting to add a node
back in the pool in the HashClient's internal mechanisms.
.. versionadded:: 1.1.5
""" # noqa E501
def __init__(self, arguments):
super().__init__(arguments)
self.serde = arguments.get("serde", pymemcache.serde.pickle_serde)
self.default_noreply = arguments.get("default_noreply", False)
self.tls_context = arguments.get("tls_context", None)
self.socket_keepalive = arguments.get("socket_keepalive", None)
self.enable_retry_client = arguments.get("enable_retry_client", False)
self.retry_attempts = arguments.get("retry_attempts", None)
self.retry_delay = arguments.get("retry_delay", None)
self.retry_for = arguments.get("retry_for", None)
self.do_not_retry_for = arguments.get("do_not_retry_for", None)
self.hashclient_retry_attempts = arguments.get(
"hashclient_retry_attempts", 2
)
self.hashclient_retry_timeout = arguments.get(
"hashclient_retry_timeout", 1
)
self.dead_timeout = arguments.get("hashclient_dead_timeout", 60)
if (
self.retry_delay is not None
or self.retry_attempts is not None
or self.retry_for is not None
or self.do_not_retry_for is not None
) and not self.enable_retry_client:
warnings.warn(
"enable_retry_client is not set; retry options "
"will be ignored"
)
def _imports(self):
global pymemcache
import pymemcache
def _create_client(self):
_kwargs = {
"serde": self.serde,
"default_noreply": self.default_noreply,
"tls_context": self.tls_context,
"retry_attempts": self.hashclient_retry_attempts,
"retry_timeout": self.hashclient_retry_timeout,
"dead_timeout": self.dead_timeout,
}
if self.socket_keepalive is not None:
_kwargs.update({"socket_keepalive": self.socket_keepalive})
client = pymemcache.client.hash.HashClient(self.url, **_kwargs)
if self.enable_retry_client:
return pymemcache.client.retrying.RetryingClient(
client,
attempts=self.retry_attempts,
retry_delay=self.retry_delay,
retry_for=self.retry_for,
do_not_retry_for=self.do_not_retry_for,
)
return client
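Tying the retry parameters above together, a region enabling the retry client might be configured as in this sketch (values illustrative)::

    from pymemcache.exceptions import MemcacheUnexpectedCloseError

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.pymemcache',
        expiration_time=3600,
        arguments={
            'url': ["127.0.0.1"],
            'enable_retry_client': True,
            'retry_attempts': 3,     # required once retry is enabled
            'retry_delay': 0.5,      # seconds between attempts
            'retry_for': (MemcacheUnexpectedCloseError,),
        }
    )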

View file

@@ -10,10 +10,9 @@ places the value as given into the dictionary.
"""
from ..api import CacheBackend
from ..api import DefaultSerialization
from ..api import NO_VALUE
from ...util.compat import pickle
class MemoryBackend(CacheBackend):
@@ -50,20 +49,36 @@ class MemoryBackend(CacheBackend):
"""
pickle_values = False
def __init__(self, arguments):
self._cache = arguments.pop("cache_dict", {})
def get(self, key):
return self._cache.get(key, NO_VALUE)
value = self._cache.get(key, NO_VALUE)
if value is not NO_VALUE and self.pickle_values:
value = pickle.loads(value)
return value
def get_multi(self, keys):
return [self._cache.get(key, NO_VALUE) for key in keys]
ret = [self._cache.get(key, NO_VALUE) for key in keys]
if self.pickle_values:
ret = [
pickle.loads(value) if value is not NO_VALUE else value
for value in ret
]
return ret
def set(self, key, value):
if self.pickle_values:
value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
self._cache[key] = value
def set_multi(self, mapping):
pickle_values = self.pickle_values
for key, value in mapping.items():
if pickle_values:
value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
self._cache[key] = value
def delete(self, key):
@@ -74,7 +89,7 @@ class MemoryBackend(CacheBackend):
self._cache.pop(key, None)
class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
class MemoryPickleBackend(MemoryBackend):
"""A backend that uses a plain dictionary, but serializes objects on
:meth:`.MemoryBackend.set` and deserializes :meth:`.MemoryBackend.get`.
@@ -105,3 +120,5 @@ class MemoryPickleBackend(DefaultSerialization, MemoryBackend):
.. versionadded:: 0.5.3
"""
pickle_values = True
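From the configuration side the two memory backends are interchangeable; a sketch, including the ``cache_dict`` argument shown above::

    from dogpile.cache import make_region

    # values stored as-is; mutating a cached object mutates the cache
    plain = make_region().configure('dogpile.cache.memory')

    # values round-tripped through pickle on set/get
    pickled = make_region().configure('dogpile.cache.memory_pickle')

    # an existing dict may be supplied as the storage target
    shared = {}
    region = make_region().configure(
        'dogpile.cache.memory', arguments={'cache_dict': shared}
    )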

View file

@@ -24,9 +24,6 @@ class NullLock(object):
def release(self):
pass
def locked(self):
return False
class NullBackend(CacheBackend):
"""A "null" backend that effectively disables all cache operations.

View file

@@ -8,23 +8,20 @@ Provides backends for talking to `Redis <http://redis.io>`_.
from __future__ import absolute_import
import typing
import warnings
from ..api import BytesBackend
from ..api import CacheBackend
from ..api import NO_VALUE
from ...util.compat import pickle
from ...util.compat import u
if typing.TYPE_CHECKING:
import redis
else:
# delayed import
redis = None # noqa F811
redis = None
__all__ = ("RedisBackend", "RedisSentinelBackend")
__all__ = ("RedisBackend",)
class RedisBackend(BytesBackend):
r"""A `Redis <http://redis.io/>`_ backend, using the
class RedisBackend(CacheBackend):
"""A `Redis <http://redis.io/>`_ backend, using the
`redis-py <http://pypi.python.org/pypi/redis/>`_ backend.
Example configuration::
@@ -49,10 +46,14 @@ class RedisBackend(BytesBackend):
:param url: string. If provided, will override separate host/port/db
params. The format is that accepted by ``StrictRedis.from_url()``.
.. versionadded:: 0.4.1
:param host: string, default is ``localhost``.
:param password: string, default is no password.
.. versionadded:: 0.4.1
:param port: integer, default is ``6379``.
:param db: integer, default is ``0``.
@@ -70,31 +71,33 @@
Redis should expire it. This argument is only valid when
``distributed_lock`` is ``True``.
.. versionadded:: 0.5.0
:param socket_timeout: float, seconds for socket timeout.
Default is None (no timeout).
.. versionadded:: 0.5.4
:param lock_sleep: integer, number of seconds to sleep when failed to
acquire a lock. This argument is only valid when
``distributed_lock`` is ``True``.
.. versionadded:: 0.5.0
:param connection_pool: ``redis.ConnectionPool`` object. If provided,
this object supersedes other connection arguments passed to the
``redis.StrictRedis`` instance, including url and/or host as well as
socket_timeout, and will be passed to ``redis.StrictRedis`` as the
source of connectivity.
.. versionadded:: 0.5.4
:param thread_local_lock: bool, whether a thread-local Redis lock object
should be used. This is the default, but is not compatible with
asynchronous runners, as they run in a different thread than the one
used to create the lock.
:param connection_kwargs: dict, additional keyword arguments are passed
along to the
``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
directly, including parameters like ``ssl``, ``ssl_certfile``,
``charset``, etc.
.. versionadded:: 1.1.6 Added ``connection_kwargs`` parameter.
.. versionadded:: 0.9.1
"""
@@ -106,12 +109,12 @@
self.password = arguments.pop("password", None)
self.port = arguments.pop("port", 6379)
self.db = arguments.pop("db", 0)
self.distributed_lock = arguments.pop("distributed_lock", False)
self.distributed_lock = arguments.get("distributed_lock", False)
self.socket_timeout = arguments.pop("socket_timeout", None)
self.lock_timeout = arguments.pop("lock_timeout", None)
self.lock_sleep = arguments.pop("lock_sleep", 0.1)
self.thread_local_lock = arguments.pop("thread_local_lock", True)
self.connection_kwargs = arguments.pop("connection_kwargs", {})
self.lock_timeout = arguments.get("lock_timeout", None)
self.lock_sleep = arguments.get("lock_sleep", 0.1)
self.thread_local_lock = arguments.get("thread_local_lock", True)
if self.distributed_lock and self.thread_local_lock:
warnings.warn(
@@ -120,8 +123,8 @@
)
self.redis_expiration_time = arguments.pop("redis_expiration_time", 0)
self.connection_pool = arguments.pop("connection_pool", None)
self._create_client()
self.connection_pool = arguments.get("connection_pool", None)
self.client = self._create_client()
def _imports(self):
# defer imports until backend is used
@@ -133,207 +136,73 @@
# the connection pool already has all other connection
# options present within, so here we disregard socket_timeout
# and others.
self.writer_client = redis.StrictRedis(
connection_pool=self.connection_pool
)
self.reader_client = self.writer_client
else:
args = {}
args.update(self.connection_kwargs)
if self.socket_timeout:
args["socket_timeout"] = self.socket_timeout
return redis.StrictRedis(connection_pool=self.connection_pool)
if self.url is not None:
args.update(url=self.url)
self.writer_client = redis.StrictRedis.from_url(**args)
self.reader_client = self.writer_client
else:
args.update(
host=self.host,
password=self.password,
port=self.port,
db=self.db,
)
self.writer_client = redis.StrictRedis(**args)
self.reader_client = self.writer_client
args = {}
if self.socket_timeout:
args["socket_timeout"] = self.socket_timeout
if self.url is not None:
args.update(url=self.url)
return redis.StrictRedis.from_url(**args)
else:
args.update(
host=self.host,
password=self.password,
port=self.port,
db=self.db,
)
return redis.StrictRedis(**args)
def get_mutex(self, key):
if self.distributed_lock:
return _RedisLockWrapper(
self.writer_client.lock(
"_lock{0}".format(key),
timeout=self.lock_timeout,
sleep=self.lock_sleep,
thread_local=self.thread_local_lock,
)
return self.client.lock(
u("_lock{0}").format(key),
timeout=self.lock_timeout,
sleep=self.lock_sleep,
thread_local=self.thread_local_lock,
)
else:
return None
def get_serialized(self, key):
value = self.reader_client.get(key)
def get(self, key):
value = self.client.get(key)
if value is None:
return NO_VALUE
return value
return pickle.loads(value)
def get_serialized_multi(self, keys):
def get_multi(self, keys):
if not keys:
return []
values = self.reader_client.mget(keys)
return [v if v is not None else NO_VALUE for v in values]
values = self.client.mget(keys)
return [pickle.loads(v) if v is not None else NO_VALUE for v in values]
def set_serialized(self, key, value):
def set(self, key, value):
if self.redis_expiration_time:
self.writer_client.setex(key, self.redis_expiration_time, value)
self.client.setex(
key,
self.redis_expiration_time,
pickle.dumps(value, pickle.HIGHEST_PROTOCOL),
)
else:
self.writer_client.set(key, value)
self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
def set_multi(self, mapping):
mapping = dict(
(k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL))
for k, v in mapping.items()
)
def set_serialized_multi(self, mapping):
if not self.redis_expiration_time:
self.writer_client.mset(mapping)
self.client.mset(mapping)
else:
pipe = self.writer_client.pipeline()
pipe = self.client.pipeline()
for key, value in mapping.items():
pipe.setex(key, self.redis_expiration_time, value)
pipe.execute()
def delete(self, key):
self.writer_client.delete(key)
self.client.delete(key)
def delete_multi(self, keys):
self.writer_client.delete(*keys)
class _RedisLockWrapper:
__slots__ = ("mutex", "__weakref__")
def __init__(self, mutex: typing.Any):
self.mutex = mutex
def acquire(self, wait: bool = True) -> typing.Any:
return self.mutex.acquire(blocking=wait)
def release(self) -> typing.Any:
return self.mutex.release()
def locked(self) -> bool:
return self.mutex.locked() # type: ignore
class RedisSentinelBackend(RedisBackend):
"""A `Redis <http://redis.io/>`_ backend, using the
`redis-py <http://pypi.python.org/pypi/redis/>`_ backend.
It will use the Sentinel of a Redis cluster.
.. versionadded:: 1.0.0
Example configuration::
from dogpile.cache import make_region
region = make_region().configure(
'dogpile.cache.redis_sentinel',
arguments = {
'sentinels': [
['redis_sentinel_1', 26379],
['redis_sentinel_2', 26379]
],
'db': 0,
'redis_expiration_time': 60*60*2, # 2 hours
'distributed_lock': True,
'thread_local_lock': False
}
)
Arguments accepted in the arguments dictionary:
:param db: integer, default is ``0``.
:param redis_expiration_time: integer, number of seconds after setting
a value that Redis should expire it. This should be larger than dogpile's
cache expiration. By default no expiration is set.
:param distributed_lock: boolean, when True, will use a
redis-lock as the dogpile lock. Use this when multiple processes will be
talking to the same redis instance. When False, dogpile will
coordinate on a regular threading mutex, Default is True.
:param lock_timeout: integer, number of seconds after acquiring a lock that
Redis should expire it. This argument is only valid when
``distributed_lock`` is ``True``.
:param socket_timeout: float, seconds for socket timeout.
Default is None (no timeout).
:param sentinels: is a list of sentinel nodes. Each node is represented by
a pair (hostname, port).
Default is None (not in sentinel mode).
:param service_name: str, the service name.
Default is 'mymaster'.
:param sentinel_kwargs: is a dictionary of connection arguments used when
connecting to sentinel instances. Any argument that can be passed to
a normal Redis connection can be specified here.
Default is {}.
:param connection_kwargs: dict, additional keyword arguments are passed
along to the
``StrictRedis.from_url()`` method or ``StrictRedis()`` constructor
directly, including parameters like ``ssl``, ``ssl_certfile``,
``charset``, etc.
:param lock_sleep: integer, number of seconds to sleep when failed to
acquire a lock. This argument is only valid when
``distributed_lock`` is ``True``.
:param thread_local_lock: bool, whether a thread-local Redis lock object
should be used. This is the default, but is not compatible with
asynchronous runners, as they run in a different thread than the one
used to create the lock.
"""
def __init__(self, arguments):
arguments = arguments.copy()
self.sentinels = arguments.pop("sentinels", None)
self.service_name = arguments.pop("service_name", "mymaster")
self.sentinel_kwargs = arguments.pop("sentinel_kwargs", {})
super().__init__(
arguments={
"distributed_lock": True,
"thread_local_lock": False,
**arguments,
}
)
def _imports(self):
# defer imports until backend is used
global redis
import redis.sentinel # noqa
def _create_client(self):
sentinel_kwargs = {}
sentinel_kwargs.update(self.sentinel_kwargs)
sentinel_kwargs.setdefault("password", self.password)
connection_kwargs = {}
connection_kwargs.update(self.connection_kwargs)
connection_kwargs.setdefault("password", self.password)
if self.db is not None:
connection_kwargs.setdefault("db", self.db)
sentinel_kwargs.setdefault("db", self.db)
if self.socket_timeout is not None:
connection_kwargs.setdefault("socket_timeout", self.socket_timeout)
sentinel = redis.sentinel.Sentinel(
self.sentinels,
sentinel_kwargs=sentinel_kwargs,
**connection_kwargs,
)
self.writer_client = sentinel.master_for(self.service_name)
self.reader_client = sentinel.slave_for(self.service_name)
self.client.delete(*keys)

View file

@@ -10,16 +10,7 @@ base backend.
"""
from typing import Mapping
from typing import Optional
from typing import Sequence
from .api import BackendFormatted
from .api import BackendSetType
from .api import CacheBackend
from .api import CacheMutex
from .api import KeyType
from .api import SerializedReturnType
class ProxyBackend(CacheBackend):
@@ -64,11 +55,11 @@ class ProxyBackend(CacheBackend):
"""
def __init__(self, *arg, **kw):
pass
def __init__(self, *args, **kwargs):
self.proxied = None
def wrap(self, backend: CacheBackend) -> "ProxyBackend":
"""Take a backend as an argument and setup the self.proxied property.
def wrap(self, backend):
""" Take a backend as an argument and setup the self.proxied property.
Return an object that can be used as a backend by a :class:`.CacheRegion`
object.
"""
@@ -82,37 +73,23 @@ class ProxyBackend(CacheBackend):
# Delegate any functions that are not already overridden to
# the proxies backend
#
def get(self, key: KeyType) -> BackendFormatted:
def get(self, key):
return self.proxied.get(key)
def set(self, key: KeyType, value: BackendSetType) -> None:
def set(self, key, value):
self.proxied.set(key, value)
def delete(self, key: KeyType) -> None:
def delete(self, key):
self.proxied.delete(key)
def get_multi(self, keys: Sequence[KeyType]) -> Sequence[BackendFormatted]:
def get_multi(self, keys):
return self.proxied.get_multi(keys)
def set_multi(self, mapping: Mapping[KeyType, BackendSetType]) -> None:
def set_multi(self, mapping):
self.proxied.set_multi(mapping)
def delete_multi(self, keys: Sequence[KeyType]) -> None:
def delete_multi(self, keys):
self.proxied.delete_multi(keys)
def get_mutex(self, key: KeyType) -> Optional[CacheMutex]:
def get_mutex(self, key):
return self.proxied.get_mutex(key)
def get_serialized(self, key: KeyType) -> SerializedReturnType:
return self.proxied.get_serialized(key)
def get_serialized_multi(
self, keys: Sequence[KeyType]
) -> Sequence[SerializedReturnType]:
return self.proxied.get_serialized_multi(keys)
def set_serialized(self, key: KeyType, value: bytes) -> None:
self.proxied.set_serialized(key, value)
def set_serialized_multi(self, mapping: Mapping[KeyType, bytes]) -> None:
self.proxied.set_serialized_multi(mapping)
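A typical use overrides one method and lets the rest delegate, in the spirit of the docstring above (an illustrative sketch)::

    import logging

    from dogpile.cache import make_region
    from dogpile.cache.proxy import ProxyBackend

    log = logging.getLogger(__name__)

    class LoggingProxy(ProxyBackend):
        # log each write, then hand off to the wrapped backend
        def set(self, key, value):
            log.debug("setting cache key %r", key)
            self.proxied.set(key, value)

    region = make_region().configure(
        'dogpile.cache.memory',
        wrap=[LoggingProxy],   # classes or instances are accepted
    )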

View file

@@ -4,35 +4,16 @@ import contextlib
import datetime
from functools import partial
from functools import wraps
import json
import logging
from numbers import Number
import threading
import time
from typing import Any
from typing import Callable
from typing import cast
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import Union
from decorator import decorate
from . import exception
from .api import BackendArguments
from .api import BackendFormatted
from .api import CachedValue
from .api import CacheMutex
from .api import CacheReturnType
from .api import KeyType
from .api import MetaDataType
from .api import NO_VALUE
from .api import SerializedReturnType
from .api import Serializer
from .api import ValuePayload
from .backends import _backend_loader
from .backends import register_backend # noqa
from .proxy import ProxyBackend
@@ -42,11 +23,12 @@ from .util import repr_obj
from .. import Lock
from .. import NeedRegenerationException
from ..util import coerce_string_conf
from ..util import compat
from ..util import memoized_property
from ..util import NameRegistry
from ..util import PluginLoader
value_version = 2
value_version = 1
"""An integer placed in the :class:`.CachedValue`
so that new versions of dogpile.cache can detect cached
values from a previous, backwards-incompatible version.
@@ -56,20 +38,7 @@ values from a previous, backwards-incompatible version.
log = logging.getLogger(__name__)
AsyncCreator = Callable[
["CacheRegion", KeyType, Callable[[], ValuePayload], CacheMutex], None
]
ExpirationTimeCallable = Callable[[], float]
ToStr = Callable[[Any], str]
FunctionKeyGenerator = Callable[..., Callable[..., KeyType]]
FunctionMultiKeyGenerator = Callable[..., Callable[..., Sequence[KeyType]]]
class RegionInvalidationStrategy:
class RegionInvalidationStrategy(object):
"""Region invalidation strategy interface
Implement this interface and pass implementation instance
@@ -141,7 +110,7 @@ class RegionInvalidationStrategy:
"""
def invalidate(self, hard: bool = True) -> None:
def invalidate(self, hard=True):
"""Region invalidation.
:class:`.CacheRegion` propagated call.
@@ -153,7 +122,7 @@
raise NotImplementedError()
def is_hard_invalidated(self, timestamp: float) -> bool:
def is_hard_invalidated(self, timestamp):
"""Check timestamp to determine if it was hard invalidated.
:return: Boolean. True if ``timestamp`` is older than
@@ -164,7 +133,7 @@
raise NotImplementedError()
def is_soft_invalidated(self, timestamp: float) -> bool:
def is_soft_invalidated(self, timestamp):
"""Check timestamp to determine if it was soft invalidated.
:return: Boolean. True if ``timestamp`` is older than
@@ -175,7 +144,7 @@
raise NotImplementedError()
def is_invalidated(self, timestamp: float) -> bool:
def is_invalidated(self, timestamp):
"""Check timestamp to determine if it was invalidated.
:return: Boolean. True if ``timestamp`` is older than
@@ -185,7 +154,7 @@
raise NotImplementedError()
def was_soft_invalidated(self) -> bool:
def was_soft_invalidated(self):
"""Indicate the region was invalidated in soft mode.
:return: Boolean. True if region was invalidated in soft mode.
@@ -194,7 +163,7 @@
raise NotImplementedError()
def was_hard_invalidated(self) -> bool:
def was_hard_invalidated(self):
"""Indicate the region was invalidated in hard mode.
:return: Boolean. True if region was invalidated in hard mode.
@@ -209,27 +178,27 @@ class DefaultInvalidationStrategy(RegionInvalidationStrategy):
self._is_hard_invalidated = None
self._invalidated = None
def invalidate(self, hard: bool = True) -> None:
def invalidate(self, hard=True):
self._is_hard_invalidated = bool(hard)
self._invalidated = time.time()
def is_invalidated(self, timestamp: float) -> bool:
def is_invalidated(self, timestamp):
return self._invalidated is not None and timestamp < self._invalidated
def was_hard_invalidated(self) -> bool:
def was_hard_invalidated(self):
return self._is_hard_invalidated is True
def is_hard_invalidated(self, timestamp: float) -> bool:
def is_hard_invalidated(self, timestamp):
return self.was_hard_invalidated() and self.is_invalidated(timestamp)
def was_soft_invalidated(self) -> bool:
def was_soft_invalidated(self):
return self._is_hard_invalidated is False
def is_soft_invalidated(self, timestamp: float) -> bool:
def is_soft_invalidated(self, timestamp):
return self.was_soft_invalidated() and self.is_invalidated(timestamp)
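A custom strategy only needs to persist its timestamp somewhere every process can see; a sketch backed by a shared mapping (a plain dict stands in for a real shared store)::

    import time

    from dogpile.cache.region import RegionInvalidationStrategy

    class SharedInvalidationStrategy(RegionInvalidationStrategy):
        def __init__(self, store):
            self.store = store  # any shared mapping

        def invalidate(self, hard=True):
            self.store["ts"] = time.time()
            self.store["hard"] = bool(hard)

        def is_invalidated(self, timestamp):
            ts = self.store.get("ts")
            return ts is not None and timestamp < ts

        def was_hard_invalidated(self):
            return self.store.get("hard") is True

        def was_soft_invalidated(self):
            return self.store.get("hard") is False

        def is_hard_invalidated(self, timestamp):
            return self.was_hard_invalidated() and self.is_invalidated(timestamp)

        def is_soft_invalidated(self, timestamp):
            return self.was_soft_invalidated() and self.is_invalidated(timestamp)

    # passed at configure time, e.g.:
    # make_region().configure('dogpile.cache.memory',
    #                         region_invalidator=SharedInvalidationStrategy({}))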
class CacheRegion:
class CacheRegion(object):
r"""A front end to a particular cache backend.
:param name: Optional, a string name for the region.
@@ -315,21 +284,6 @@ class CacheRegion:
to convert non-string or Unicode keys to bytestrings, which is
needed when using a backend such as bsddb or dbm under Python 2.x
in conjunction with Unicode keys.
:param serializer: function which will be applied to all values before
passing to the backend. Defaults to ``None``, in which case the
serializer recommended by the backend will be used. Typical
serializers include ``pickle.dumps`` and ``json.dumps``.
.. versionadded:: 1.1.0
:param deserializer: function which will be applied to all values returned
by the backend. Defaults to ``None``, in which case the
deserializer recommended by the backend will be used. Typical
deserializers include ``pickle.loads`` and ``json.loads``.
.. versionadded:: 1.1.0
:param async_creation_runner: A callable that, when specified,
will be passed to and called by dogpile.lock when
there is a stale value present in the cache. It will be passed the
@@ -385,37 +339,31 @@ class CacheRegion:
def __init__(
self,
name: Optional[str] = None,
function_key_generator: FunctionKeyGenerator = function_key_generator,
function_multi_key_generator: FunctionMultiKeyGenerator = function_multi_key_generator, # noqa E501
key_mangler: Optional[Callable[[KeyType], KeyType]] = None,
serializer: Optional[Callable[[ValuePayload], bytes]] = None,
deserializer: Optional[Callable[[bytes], ValuePayload]] = None,
async_creation_runner: Optional[AsyncCreator] = None,
name=None,
function_key_generator=function_key_generator,
function_multi_key_generator=function_multi_key_generator,
key_mangler=None,
async_creation_runner=None,
):
"""Construct a new :class:`.CacheRegion`."""
self.name = name
self.function_key_generator = function_key_generator
self.function_multi_key_generator = function_multi_key_generator
self.key_mangler = self._user_defined_key_mangler = key_mangler
self.serializer = self._user_defined_serializer = serializer
self.deserializer = self._user_defined_deserializer = deserializer
self.async_creation_runner = async_creation_runner
self.region_invalidator: RegionInvalidationStrategy = (
DefaultInvalidationStrategy()
)
self.region_invalidator = DefaultInvalidationStrategy()
def configure(
self,
backend: str,
expiration_time: Optional[Union[float, datetime.timedelta]] = None,
arguments: Optional[BackendArguments] = None,
_config_argument_dict: Optional[Mapping[str, Any]] = None,
_config_prefix: Optional[str] = None,
wrap: Sequence[Union[ProxyBackend, Type[ProxyBackend]]] = (),
replace_existing_backend: bool = False,
region_invalidator: Optional[RegionInvalidationStrategy] = None,
) -> "CacheRegion":
backend,
expiration_time=None,
arguments=None,
_config_argument_dict=None,
_config_prefix=None,
wrap=None,
replace_existing_backend=False,
region_invalidator=None,
):
"""Configure a :class:`.CacheRegion`.
The :class:`.CacheRegion` itself
@@ -466,7 +414,7 @@ class CacheRegion:
.. versionadded:: 0.6.2
"""
"""
if "backend" in self.__dict__ and not replace_existing_backend:
raise exception.RegionAlreadyConfigured(
@@ -490,12 +438,12 @@ class CacheRegion:
else:
self.backend = backend_cls(arguments or {})
self.expiration_time: Union[float, None]
if not expiration_time or isinstance(expiration_time, Number):
self.expiration_time = cast(Union[None, float], expiration_time)
self.expiration_time = expiration_time
elif isinstance(expiration_time, datetime.timedelta):
self.expiration_time = int(expiration_time.total_seconds())
self.expiration_time = int(
compat.timedelta_total_seconds(expiration_time)
)
else:
raise exception.ValidationError(
"expiration_time is not a number or timedelta."
@@ -504,12 +452,6 @@ class CacheRegion:
if not self._user_defined_key_mangler:
self.key_mangler = self.backend.key_mangler
if not self._user_defined_serializer:
self.serializer = self.backend.serializer
if not self._user_defined_deserializer:
self.deserializer = self.backend.deserializer
self._lock_registry = NameRegistry(self._create_mutex)
if getattr(wrap, "__iter__", False):
@@ -521,28 +463,26 @@ class CacheRegion:
return self
def wrap(self, proxy: Union[ProxyBackend, Type[ProxyBackend]]) -> None:
"""Takes a ProxyBackend instance or class and wraps the
attached backend."""
def wrap(self, proxy):
""" Takes a ProxyBackend instance or class and wraps the
attached backend. """
# if we were passed a type rather than an instance then
# initialize it.
if isinstance(proxy, type):
proxy_instance = proxy()
else:
proxy_instance = proxy
if type(proxy) == type:
proxy = proxy()
if not isinstance(proxy_instance, ProxyBackend):
if not issubclass(type(proxy), ProxyBackend):
raise TypeError(
"%r is not a valid ProxyBackend" % (proxy_instance,)
"Type %s is not a valid ProxyBackend" % type(proxy)
)
self.backend = proxy_instance.wrap(self.backend)
self.backend = proxy.wrap(self.backend)
def _mutex(self, key):
return self._lock_registry.get(key)
class _LockWrapper(CacheMutex):
class _LockWrapper(object):
"""weakref-capable wrapper for threading.Lock"""
def __init__(self):
@@ -554,9 +494,6 @@ class CacheRegion:
def release(self):
self.lock.release()
def locked(self):
return self.lock.locked()
def _create_mutex(self, key):
mutex = self.backend.get_mutex(key)
if mutex is not None:
@@ -762,7 +699,7 @@ class CacheRegion:
if self.key_mangler:
key = self.key_mangler(key)
value = self._get_from_backend(key)
value = self.backend.get(key)
value = self._unexpired_value_fn(expiration_time, ignore_expiration)(
value
)
@@ -830,10 +767,10 @@ class CacheRegion:
if not keys:
return []
if self.key_mangler is not None:
keys = [self.key_mangler(key) for key in keys]
if self.key_mangler:
keys = list(map(lambda key: self.key_mangler(key), keys))
backend_values = self._get_multi_from_backend(keys)
backend_values = self.backend.get_multi(keys)
_unexpired_value_fn = self._unexpired_value_fn(
expiration_time, ignore_expiration
@ -868,26 +805,14 @@ class CacheRegion:
return True
def key_is_locked(self, key: KeyType) -> bool:
"""Return True if a particular cache key is currently being generated
within the dogpile lock.
.. versionadded:: 1.1.2
"""
mutex = self._mutex(key)
locked: bool = mutex.locked()
return locked
def get_or_create(
self,
key: KeyType,
creator: Callable[..., ValuePayload],
expiration_time: Optional[float] = None,
should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
creator_args: Optional[Tuple[Any, Mapping[str, Any]]] = None,
) -> ValuePayload:
key,
creator,
expiration_time=None,
should_cache_fn=None,
creator_args=None,
):
"""Return a cached value based on the given key.
If the value does not exist or is considered to be expired
@ -974,17 +899,12 @@ class CacheRegion:
key = self.key_mangler(key)
def get_value():
value = self._get_from_backend(key)
value = self.backend.get(key)
if self._is_cache_miss(value, orig_key):
raise NeedRegenerationException()
ct = cast(CachedValue, value).metadata["ct"]
ct = value.metadata["ct"]
if self.region_invalidator.is_soft_invalidated(ct):
if expiration_time is None:
raise exception.DogpileCacheException(
"Non-None expiration time required "
"for soft invalidation"
)
ct = time.time() - expiration_time - 0.0001
return value.payload, ct
@ -999,42 +919,37 @@ class CacheRegion:
created_value = creator()
value = self._value(created_value)
if (
expiration_time is None
and self.region_invalidator.was_soft_invalidated()
):
raise exception.DogpileCacheException(
"Non-None expiration time required "
"for soft invalidation"
)
if not should_cache_fn or should_cache_fn(created_value):
self._set_cached_value_to_backend(key, value)
self.backend.set(key, value)
return value.payload, value.metadata["ct"]
if expiration_time is None:
expiration_time = self.expiration_time
if (
expiration_time is None
and self.region_invalidator.was_soft_invalidated()
):
raise exception.DogpileCacheException(
"Non-None expiration time required " "for soft invalidation"
)
if expiration_time == -1:
expiration_time = None
async_creator: Optional[Callable[[CacheMutex], AsyncCreator]]
if self.async_creation_runner:
acr = self.async_creation_runner
def async_creator(mutex):
if creator_args:
ca = creator_args
@wraps(creator)
def go():
return creator(*ca[0], **ca[1])
return creator(*creator_args[0], **creator_args[1])
else:
go = creator
return acr(self, orig_key, go, mutex)
return self.async_creation_runner(self, orig_key, go, mutex)
else:
async_creator = None
@ -1049,12 +964,8 @@ class CacheRegion:
return value
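# Usage sketch for get_or_create(), assuming the region configured in
# the earlier sketch; load_user is a hypothetical creator whose
# (positional, keyword) arguments travel in creator_args.
def load_user(user_id):
    return {"id": user_id}  # stand-in for an expensive lookup

user = region.get_or_create(
    "user:42",
    load_user,
    expiration_time=300,
    creator_args=((42,), {}),
)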
def get_or_create_multi(
self,
keys: Sequence[KeyType],
creator: Callable[[], ValuePayload],
expiration_time: Optional[float] = None,
should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
) -> Sequence[ValuePayload]:
self, keys, creator, expiration_time=None, should_cache_fn=None
):
"""Return a sequence of cached values based on a sequence of keys.
The behavior for generation of values based on keys corresponds
@ -1109,29 +1020,34 @@ class CacheRegion:
# _has_value() will return False.
return value.payload, 0
else:
ct = cast(CachedValue, value).metadata["ct"]
ct = value.metadata["ct"]
if self.region_invalidator.is_soft_invalidated(ct):
if expiration_time is None:
raise exception.DogpileCacheException(
"Non-None expiration time required "
"for soft invalidation"
)
ct = time.time() - expiration_time - 0.0001
return value.payload, ct
def gen_value() -> ValuePayload:
def gen_value():
raise NotImplementedError()
def async_creator(mutexes, key, mutex):
def async_creator(key, mutex):
mutexes[key] = mutex
if expiration_time is None:
expiration_time = self.expiration_time
if (
expiration_time is None
and self.region_invalidator.was_soft_invalidated()
):
raise exception.DogpileCacheException(
"Non-None expiration time required " "for soft invalidation"
)
if expiration_time == -1:
expiration_time = None
mutexes = {}
sorted_unique_keys = sorted(set(keys))
if self.key_mangler:
@ -1141,11 +1057,7 @@ class CacheRegion:
orig_to_mangled = dict(zip(sorted_unique_keys, mangled_keys))
values = dict(
zip(mangled_keys, self._get_multi_from_backend(mangled_keys))
)
mutexes: Mapping[KeyType, Any] = {}
values = dict(zip(mangled_keys, self.backend.get_multi(mangled_keys)))
for orig_key, mangled_key in orig_to_mangled.items():
with Lock(
@ -1153,9 +1065,7 @@ class CacheRegion:
gen_value,
lambda: get_value(mangled_key),
expiration_time,
async_creator=lambda mutex: async_creator(
mutexes, orig_key, mutex
),
async_creator=lambda mutex: async_creator(orig_key, mutex),
):
pass
try:
@ -1167,194 +1077,59 @@ class CacheRegion:
with self._log_time(keys_to_get):
new_values = creator(*keys_to_get)
values_w_created = {
orig_to_mangled[k]: self._value(v)
values_w_created = dict(
(orig_to_mangled[k], self._value(v))
for k, v in zip(keys_to_get, new_values)
}
if (
expiration_time is None
and self.region_invalidator.was_soft_invalidated()
):
raise exception.DogpileCacheException(
"Non-None expiration time required "
"for soft invalidation"
)
)
if not should_cache_fn:
self._set_multi_cached_value_to_backend(values_w_created)
self.backend.set_multi(values_w_created)
else:
self._set_multi_cached_value_to_backend(
{
k: v
for k, v in values_w_created.items()
if should_cache_fn(v.payload)
}
values_to_cache = dict(
(k, v)
for k, v in values_w_created.items()
if should_cache_fn(v[0])
)
if values_to_cache:
self.backend.set_multi(values_to_cache)
values.update(values_w_created)
return [values[orig_to_mangled[k]].payload for k in keys]
finally:
for mutex in mutexes.values():
mutex.release()
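# Usage sketch for get_or_create_multi(): the creator receives only the
# keys that missed the cache and must return values in the same order.
# load_many is a hypothetical generator function.
def load_many(*keys):
    return ["value-for-%s" % k for k in keys]

values = region.get_or_create_multi(
    ["a", "b", "c"],
    load_many,
    expiration_time=60,
)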
def _value(
self, value: Any, metadata: Optional[MetaDataType] = None
) -> CachedValue:
def _value(self, value):
"""Return a :class:`.CachedValue` given a value."""
return CachedValue(value, {"ct": time.time(), "v": value_version})
if metadata is None:
metadata = self._gen_metadata()
return CachedValue(value, metadata)
def _parse_serialized_from_backend(
self, value: SerializedReturnType
) -> CacheReturnType:
if value in (None, NO_VALUE):
return NO_VALUE
assert self.deserializer
byte_value = cast(bytes, value)
bytes_metadata, _, bytes_payload = byte_value.partition(b"|")
metadata = json.loads(bytes_metadata)
payload = self.deserializer(bytes_payload)
return CachedValue(payload, metadata)
def _serialize_cached_value_elements(
self, payload: ValuePayload, metadata: MetaDataType
) -> bytes:
serializer = cast(Serializer, self.serializer)
return b"%b|%b" % (
json.dumps(metadata).encode("ascii"),
serializer(payload),
)
def _serialized_payload(
self, payload: ValuePayload, metadata: Optional[MetaDataType] = None
) -> BackendFormatted:
"""Return a backend formatted representation of a value.
If a serializer is in use then this will return a string representation
with the value formatted by the serializer.
"""
if metadata is None:
metadata = self._gen_metadata()
return self._serialize_cached_value_elements(payload, metadata)
def _serialized_cached_value(self, value: CachedValue) -> BackendFormatted:
"""Return a backend formatted representation of a :class:`.CachedValue`.
If a serializer is in use then this will return a string representation
with the value formatted by the serializer.
"""
assert self.serializer
return self._serialize_cached_value_elements(
value.payload, value.metadata
)
def _get_from_backend(self, key: KeyType) -> CacheReturnType:
if self.deserializer:
return self._parse_serialized_from_backend(
self.backend.get_serialized(key)
)
else:
return cast(CacheReturnType, self.backend.get(key))
def _get_multi_from_backend(
self, keys: Sequence[KeyType]
) -> Sequence[CacheReturnType]:
if self.deserializer:
return [
self._parse_serialized_from_backend(v)
for v in self.backend.get_serialized_multi(keys)
]
else:
return cast(
Sequence[CacheReturnType], self.backend.get_multi(keys)
)
def _set_cached_value_to_backend(
self, key: KeyType, value: CachedValue
) -> None:
if self.serializer:
self.backend.set_serialized(
key, self._serialized_cached_value(value)
)
else:
self.backend.set(key, value)
def _set_multi_cached_value_to_backend(
self, mapping: Mapping[KeyType, CachedValue]
) -> None:
if not mapping:
return
if self.serializer:
self.backend.set_serialized_multi(
{
k: self._serialized_cached_value(v)
for k, v in mapping.items()
}
)
else:
self.backend.set_multi(mapping)
def _gen_metadata(self) -> MetaDataType:
return {"ct": time.time(), "v": value_version}
def set(self, key: KeyType, value: ValuePayload) -> None:
def set(self, key, value):
"""Place a new value in the cache under the given key."""
if self.key_mangler:
key = self.key_mangler(key)
self.backend.set(key, self._value(value))
if self.serializer:
self.backend.set_serialized(key, self._serialized_payload(value))
else:
self.backend.set(key, self._value(value))
def set_multi(self, mapping):
"""Place new values in the cache under the given keys.
def set_multi(self, mapping: Mapping[KeyType, ValuePayload]) -> None:
"""Place new values in the cache under the given keys."""
.. versionadded:: 0.5.0
"""
if not mapping:
return
metadata = self._gen_metadata()
if self.serializer:
if self.key_mangler:
mapping = {
self.key_mangler(k): self._serialized_payload(
v, metadata=metadata
)
for k, v in mapping.items()
}
else:
mapping = {
k: self._serialized_payload(v, metadata=metadata)
for k, v in mapping.items()
}
self.backend.set_serialized_multi(mapping)
if self.key_mangler:
mapping = dict(
(self.key_mangler(k), self._value(v))
for k, v in mapping.items()
)
else:
if self.key_mangler:
mapping = {
self.key_mangler(k): self._value(v, metadata=metadata)
for k, v in mapping.items()
}
else:
mapping = {
k: self._value(v, metadata=metadata)
for k, v in mapping.items()
}
self.backend.set_multi(mapping)
mapping = dict((k, self._value(v)) for k, v in mapping.items())
self.backend.set_multi(mapping)
def delete(self, key: KeyType) -> None:
def delete(self, key):
"""Remove a value from the cache.
This operation is idempotent (can be called multiple times, or on a
@ -1366,7 +1141,7 @@ class CacheRegion:
self.backend.delete(key)
def delete_multi(self, keys: Sequence[KeyType]) -> None:
def delete_multi(self, keys):
"""Remove multiple values from the cache.
This operation is idempotent (can be called multiple times, or on a
@ -1377,19 +1152,18 @@ class CacheRegion:
"""
if self.key_mangler:
km = self.key_mangler
keys = [km(key) for key in keys]
keys = list(map(lambda key: self.key_mangler(key), keys))
self.backend.delete_multi(keys)
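# Round-trip sketch for the plain setters and deleters defined above,
# again assuming a configured "region".
region.set("greeting", "hello")
region.set_multi({"k1": 1, "k2": 2})
assert region.get("greeting") == "hello"
region.delete("greeting")
region.delete_multi(["k1", "k2"])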
def cache_on_arguments(
self,
namespace: Optional[str] = None,
expiration_time: Union[float, ExpirationTimeCallable, None] = None,
should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
to_str: Callable[[Any], str] = str,
function_key_generator: Optional[FunctionKeyGenerator] = None,
) -> Callable[[Callable[..., ValuePayload]], Callable[..., ValuePayload]]:
namespace=None,
expiration_time=None,
should_cache_fn=None,
to_str=compat.string_type,
function_key_generator=None,
):
"""A function decorator that will cache the return
value of the function using a key derived from the
function itself and its arguments.
@ -1482,7 +1256,7 @@ class CacheRegion:
(with caveats) for use with instance or class methods.
Given the example::
class MyClass:
class MyClass(object):
@region.cache_on_arguments(namespace="foo")
def one(self, a, b):
return a + b
@ -1496,12 +1270,12 @@ class CacheRegion:
name within the same module, as can occur when decorating
instance or class methods as below::
class MyClass:
class MyClass(object):
@region.cache_on_arguments(namespace='MC')
def somemethod(self, x, y):
""
class MyOtherClass:
class MyOtherClass(object):
@region.cache_on_arguments(namespace='MOC')
def somemethod(self, x, y):
""
@ -1539,8 +1313,14 @@ class CacheRegion:
end of the day, week or time period" and "cache until a certain date
or time passes".
.. versionchanged:: 0.5.0
``expiration_time`` may be passed as a callable to
:meth:`.CacheRegion.cache_on_arguments`.
:param should_cache_fn: passed to :meth:`.CacheRegion.get_or_create`.
.. versionadded:: 0.4.3
:param to_str: callable, will be called on each function argument
in order to convert to a string. Defaults to ``str()``. If the
function accepts non-ascii unicode arguments on Python 2.x, the
@ -1548,10 +1328,14 @@ class CacheRegion:
produce unicode cache keys which may require key mangling before
reaching the cache.
.. versionadded:: 0.5.0
:param function_key_generator: a function that will produce a
"cache key". This function will supersede the one configured on the
:class:`.CacheRegion` itself.
.. versionadded:: 0.5.5
.. seealso::
:meth:`.CacheRegion.cache_multi_on_arguments`
@ -1559,34 +1343,30 @@ class CacheRegion:
:meth:`.CacheRegion.get_or_create`
"""
expiration_time_is_callable = callable(expiration_time)
expiration_time_is_callable = compat.callable(expiration_time)
if function_key_generator is None:
_function_key_generator = self.function_key_generator
else:
_function_key_generator = function_key_generator
function_key_generator = self.function_key_generator
def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
key = key_generator(*arg, **kw)
timeout: Optional[float] = (
cast(ExpirationTimeCallable, expiration_time)()
timeout = (
expiration_time()
if expiration_time_is_callable
else cast(Optional[float], expiration_time)
else expiration_time
)
return self.get_or_create(
key, user_func, timeout, should_cache_fn, (arg, kw)
)
def cache_decorator(user_func):
if to_str is cast(Callable[[Any], str], str):
if to_str is compat.string_type:
# backwards compatible
key_generator = _function_key_generator(
namespace, user_func
) # type: ignore
key_generator = function_key_generator(namespace, user_func)
else:
key_generator = _function_key_generator(
namespace, user_func, to_str
key_generator = function_key_generator(
namespace, user_func, to_str=to_str
)
def refresh(*arg, **kw):
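# Sketch of the decorator on a module-level function; the decorated
# function also gains invalidate(), refresh(), set() and get().
@region.cache_on_arguments(namespace="demo", expiration_time=120)
def heavy(a, b):
    return a + b

heavy(1, 2)              # computed once, then served from the cache
heavy.invalidate(1, 2)   # drop the cached value for these arguments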
@ -1626,20 +1406,13 @@ class CacheRegion:
def cache_multi_on_arguments(
self,
namespace: Optional[str] = None,
expiration_time: Union[float, ExpirationTimeCallable, None] = None,
should_cache_fn: Optional[Callable[[ValuePayload], bool]] = None,
asdict: bool = False,
to_str: ToStr = str,
function_multi_key_generator: Optional[
FunctionMultiKeyGenerator
] = None,
) -> Callable[
[Callable[..., Sequence[ValuePayload]]],
Callable[
..., Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]
],
]:
namespace=None,
expiration_time=None,
should_cache_fn=None,
asdict=False,
to_str=compat.string_type,
function_multi_key_generator=None,
):
"""A function decorator that will cache multiple return
values from the function using a sequence of keys derived from the
function itself and the arguments passed to it.
@ -1756,19 +1529,12 @@ class CacheRegion:
:meth:`.CacheRegion.get_or_create_multi`
"""
expiration_time_is_callable = callable(expiration_time)
expiration_time_is_callable = compat.callable(expiration_time)
if function_multi_key_generator is None:
_function_multi_key_generator = self.function_multi_key_generator
else:
_function_multi_key_generator = function_multi_key_generator
function_multi_key_generator = self.function_multi_key_generator
def get_or_create_for_user_func(
key_generator: Callable[..., Sequence[KeyType]],
user_func: Callable[..., Sequence[ValuePayload]],
*arg: Any,
**kw: Any,
) -> Union[Sequence[ValuePayload], Mapping[KeyType, ValuePayload]]:
def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
cache_keys = arg
keys = key_generator(*arg, **kw)
key_lookup = dict(zip(keys, cache_keys))
@ -1777,16 +1543,12 @@ class CacheRegion:
def creator(*keys_to_create):
return user_func(*[key_lookup[k] for k in keys_to_create])
timeout: Optional[float] = (
cast(ExpirationTimeCallable, expiration_time)()
timeout = (
expiration_time()
if expiration_time_is_callable
else cast(Optional[float], expiration_time)
else expiration_time
)
result: Union[
Sequence[ValuePayload], Mapping[KeyType, ValuePayload]
]
if asdict:
def dict_create(*keys):
@ -1819,7 +1581,7 @@ class CacheRegion:
return result
def cache_decorator(user_func):
key_generator = _function_multi_key_generator(
key_generator = function_multi_key_generator(
namespace, user_func, to_str=to_str
)
@ -1865,7 +1627,7 @@ class CacheRegion:
return cache_decorator
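# Sketch of the multi-key decorator: one cache key per positional
# argument; asdict=True would return a mapping instead of a list.
@region.cache_multi_on_arguments(namespace="demo")
def lookup_many(*ids):
    return ["value-for-%s" % i for i in ids]

lookup_many(1, 2, 3)  # misses are generated in a single creator call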
def make_region(*arg: Any, **kw: Any) -> CacheRegion:
def make_region(*arg, **kw):
"""Instantiate a new :class:`.CacheRegion`.
Currently, :func:`.make_region` is a passthrough
View file
@ -4,7 +4,7 @@ from ..util import compat
from ..util import langhelpers
def function_key_generator(namespace, fn, to_str=str):
def function_key_generator(namespace, fn, to_str=compat.string_type):
"""Return a function that generates a string
key, based on a given function as well as
arguments to the returned function itself.
@ -45,7 +45,7 @@ def function_key_generator(namespace, fn, to_str=str):
return generate_key
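# Key-shape illustration: "module:function|namespace" followed by the
# space-joined, stringified arguments. get_user is hypothetical.
def get_user(user_id):
    pass

gen = function_key_generator("myns", get_user)
gen(42)  # -> e.g. "__main__:get_user|myns|42"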
def function_multi_key_generator(namespace, fn, to_str=str):
def function_multi_key_generator(namespace, fn, to_str=compat.string_type):
if namespace is None:
namespace = "%s:%s" % (fn.__module__, fn.__name__)
@ -68,7 +68,7 @@ def function_multi_key_generator(namespace, fn, to_str=str):
return generate_keys
def kwarg_function_key_generator(namespace, fn, to_str=str):
def kwarg_function_key_generator(namespace, fn, to_str=compat.string_type):
"""Return a function that generates a string
key, based on a given function as well as
arguments to the returned function itself.
@ -131,7 +131,7 @@ def kwarg_function_key_generator(namespace, fn, to_str=str):
def sha1_mangle_key(key):
"""a SHA1 key mangler."""
if isinstance(key, str):
if isinstance(key, compat.text_type):
key = key.encode("utf-8")
return sha1(key).hexdigest()
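# The mangler yields fixed-length keys for backends that cap key size;
# text input is utf-8 encoded first.
sha1_mangle_key(u"user:42")  # -> a 40-character hex digest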
@ -162,7 +162,7 @@ PluginLoader = langhelpers.PluginLoader
to_list = langhelpers.to_list
class repr_obj:
class repr_obj(object):
__slots__ = ("value", "max_chars")
View file
@ -15,7 +15,7 @@ class NeedRegenerationException(Exception):
NOT_REGENERATED = object()
class Lock:
class Lock(object):
"""Dogpile lock class.
Provides an interface around an arbitrary mutex
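# Rough sketch of the Lock protocol; "cell" is an illustrative stand-in
# for a real backend. value_and_created_fn returns (value, created_time)
# or raises NeedRegenerationException; the creator returns the same
# tuple shape. Lock and NeedRegenerationException come from this module.
import threading
import time

cell = []

def get_value():
    if not cell:
        raise NeedRegenerationException()
    return cell[0], time.time()

def creator():
    cell[:] = ["fresh"]
    return cell[0], time.time()

with Lock(threading.Lock(), creator, get_value, 30) as value:
    print(value)  # "fresh"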
View file
@ -1,6 +1,18 @@
import collections
import inspect
import sys
py2k = sys.version_info < (3, 0)
py3k = sys.version_info >= (3, 0)
py32 = sys.version_info >= (3, 2)
py27 = sys.version_info >= (2, 7)
jython = sys.platform.startswith("java")
win32 = sys.platform.startswith("win")
try:
import threading
except ImportError:
import dummy_threading as threading # noqa
FullArgSpec = collections.namedtuple(
"FullArgSpec",
@ -21,17 +33,7 @@ ArgSpec = collections.namedtuple(
def inspect_getfullargspec(func):
"""Fully vendored version of getfullargspec from Python 3.3.
This version is more performant than the one which appeared in
later Python 3 versions.
"""
# if a Signature is already present, as is the case with newer
# "decorator" package, defer back to built in
if hasattr(func, "__signature__"):
return inspect.getfullargspec(func)
"""Fully vendored version of getfullargspec from Python 3.3."""
if inspect.ismethod(func):
func = func.__func__
@ -44,7 +46,7 @@ def inspect_getfullargspec(func):
nargs = co.co_argcount
names = co.co_varnames
nkwargs = co.co_kwonlyargcount
nkwargs = co.co_kwonlyargcount if py3k else 0
args = list(names[:nargs])
kwonlyargs = list(names[nargs : nargs + nkwargs])
@ -63,10 +65,77 @@ def inspect_getfullargspec(func):
varkw,
func.__defaults__,
kwonlyargs,
func.__kwdefaults__,
func.__annotations__,
func.__kwdefaults__ if py3k else None,
func.__annotations__ if py3k else {},
)
def inspect_getargspec(func):
return ArgSpec(*inspect_getfullargspec(func)[0:4])
if py3k: # pragma: no cover
string_types = (str,)
text_type = str
string_type = str
if py32:
callable = callable # noqa
else:
def callable(fn): # noqa
return hasattr(fn, "__call__")
def u(s):
return s
def ue(s):
return s
import configparser
import io
import _thread as thread
else:
# Using noqa below due to tox -e pep8, which uses
# python3.7 as the default interpreter
string_types = (basestring,) # noqa
text_type = unicode # noqa
string_type = str
def u(s):
return unicode(s, "utf-8") # noqa
def ue(s):
return unicode(s, "unicode_escape") # noqa
import ConfigParser as configparser # noqa
import StringIO as io # noqa
callable = callable # noqa
import thread # noqa
if py3k or jython:
import pickle
else:
import cPickle as pickle # noqa
if py3k:
def read_config_file(config, fileobj):
return config.read_file(fileobj)
else:
def read_config_file(config, fileobj):
return config.readfp(fileobj)
def timedelta_total_seconds(td):
if py27:
return td.total_seconds()
else:
return (
td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6
) / 1e6
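# Quick check of the fallback arithmetic above:
import datetime
td = datetime.timedelta(days=1, seconds=30)
timedelta_total_seconds(td)  # -> 86430.0 via either branch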
View file
@ -1,17 +1,13 @@
import abc
import collections
import re
import threading
from typing import MutableMapping
from typing import MutableSet
import stevedore
from . import compat
def coerce_string_conf(d):
result = {}
for k, v in d.items():
if not isinstance(v, str):
if not isinstance(v, compat.string_types):
result[k] = v
continue
@ -29,26 +25,21 @@ def coerce_string_conf(d):
return result
class PluginLoader:
class PluginLoader(object):
def __init__(self, group):
self.group = group
self.impls = {} # loaded plugins
self._mgr = None # lazily defined stevedore manager
self._unloaded = {} # plugins registered but not loaded
self.impls = {}
def load(self, name):
if name in self._unloaded:
self.impls[name] = self._unloaded[name]()
return self.impls[name]
if name in self.impls:
return self.impls[name]
return self.impls[name]()
else: # pragma NO COVERAGE
if self._mgr is None:
self._mgr = stevedore.ExtensionManager(self.group)
try:
self.impls[name] = self._mgr[name].plugin
return self.impls[name]
except KeyError:
import pkg_resources
for impl in pkg_resources.iter_entry_points(self.group, name):
self.impls[name] = impl.load
return impl.load()
else:
raise self.NotFound(
"Can't load plugin %s %s" % (self.group, name)
)
@ -58,13 +49,13 @@ class PluginLoader:
mod = __import__(modulepath, fromlist=[objname])
return getattr(mod, objname)
self._unloaded[name] = load
self.impls[name] = load
class NotFound(Exception):
"""The specified plugin could not be found."""
class memoized_property:
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
@ -89,23 +80,8 @@ def to_list(x, default=None):
return x
class Mutex(abc.ABC):
@abc.abstractmethod
def acquire(self, wait: bool = True) -> bool:
raise NotImplementedError()
@abc.abstractmethod
def release(self) -> None:
raise NotImplementedError()
class KeyReentrantMutex:
def __init__(
self,
key: str,
mutex: Mutex,
keys: MutableMapping[int, MutableSet[str]],
):
class KeyReentrantMutex(object):
def __init__(self, key, mutex, keys):
self.key = key
self.mutex = mutex
self.keys = keys
@ -115,9 +91,7 @@ class KeyReentrantMutex:
# this collection holds zero or one
# thread idents as the key; a set of
# keynames held as the value.
keystore: MutableMapping[
int, MutableSet[str]
] = collections.defaultdict(set)
keystore = collections.defaultdict(set)
def fac(key):
return KeyReentrantMutex(key, mutex, keystore)
@ -125,7 +99,7 @@ class KeyReentrantMutex:
return fac
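# Factory sketch: one shared mutex, reentrant per thread across keys.
# SimpleMutex is illustrative and follows the acquire(wait)/release()
# shape used elsewhere in this module; factory() is assumed to be the
# enclosing classmethod.
import threading

class SimpleMutex:
    def __init__(self):
        self._lock = threading.Lock()

    def acquire(self, wait=True):
        return self._lock.acquire(wait)

    def release(self):
        self._lock.release()

fac = KeyReentrantMutex.factory(SimpleMutex())
m_a = fac("key-a")
m_a.acquire()
fac("key-b").acquire()  # same thread, new key: no deadlock
fac("key-b").release()
m_a.release()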
def acquire(self, wait=True):
current_thread = threading.get_ident()
current_thread = compat.threading.current_thread().ident
keys = self.keys.get(current_thread)
if keys is not None and self.key not in keys:
# current lockholder, new key. add it in
@ -139,7 +113,7 @@ class KeyReentrantMutex:
return False
def release(self):
current_thread = threading.get_ident()
current_thread = compat.threading.current_thread().ident
keys = self.keys.get(current_thread)
assert keys is not None, "this thread didn't do the acquire"
assert self.key in keys, "No acquire held for key '%s'" % self.key
@ -149,10 +123,3 @@ class KeyReentrantMutex:
# the thread ident and unlock.
del self.keys[current_thread]
self.mutex.release()
def locked(self):
current_thread = threading.get_ident()
keys = self.keys.get(current_thread)
if keys is None:
return False
return self.key in keys
View file
@ -1,9 +1,7 @@
import threading
from typing import Any
from typing import Callable
from typing import MutableMapping
import weakref
from .compat import threading
class NameRegistry(object):
"""Generates and return an object, keeping it as a
@ -41,15 +39,19 @@ class NameRegistry(object):
"""
_locks = weakref.WeakValueDictionary()
_mutex = threading.RLock()
def __init__(self, creator: Callable[..., Any]):
"""Create a new :class:`.NameRegistry`."""
self._values: MutableMapping[str, Any] = weakref.WeakValueDictionary()
def __init__(self, creator):
"""Create a new :class:`.NameRegistry`.
"""
self._values = weakref.WeakValueDictionary()
self._mutex = threading.RLock()
self.creator = creator
def get(self, identifier: str, *args: Any, **kw: Any) -> Any:
def get(self, identifier, *args, **kw):
r"""Get and possibly create the value.
:param identifier: Hash key for the value.
@ -68,7 +70,7 @@ class NameRegistry(object):
except KeyError:
return self._sync_get(identifier, *args, **kw)
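# Sketch: the same identifier yields the same live object while a
# strong reference survives; the creator receives the identifier.
# Resource is an illustrative, weakref-capable value type.
class Resource:
    def __init__(self, name):
        self.name = name

registry = NameRegistry(Resource)
a = registry.get("some-name")
b = registry.get("some-name")
assert a is b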
def _sync_get(self, identifier: str, *args: Any, **kw: Any) -> Any:
def _sync_get(self, identifier, *args, **kw):
self._mutex.acquire()
try:
try:
View file
@ -1,5 +1,6 @@
import logging
import threading
from .compat import threading
log = logging.getLogger(__name__)
@ -62,10 +63,10 @@ class ReadWriteMutex(object):
# check if we are the last asynchronous reader thread
# out the door.
if self.async_ == 0:
# yes. so if a sync operation is waiting, notify_all to wake
# yes. so if a sync operation is waiting, notifyAll to wake
# it up
if self.current_sync_operation is not None:
self.condition.notify_all()
self.condition.notifyAll()
elif self.async_ < 0:
raise LockError(
"Synchronizer error - too many "
@ -95,7 +96,7 @@ class ReadWriteMutex(object):
# establish ourselves as the current sync
# this indicates to other read/write operations
# that they should wait until this is None again
self.current_sync_operation = threading.current_thread()
self.current_sync_operation = threading.currentThread()
# now wait again for asyncs to finish
if self.async_ > 0:
@ -117,7 +118,7 @@ class ReadWriteMutex(object):
"""Release the 'write' lock."""
self.condition.acquire()
try:
if self.current_sync_operation is not threading.current_thread():
if self.current_sync_operation is not threading.currentThread():
raise LockError(
"Synchronizer error - current thread doesn't "
"have the write lock"
@ -128,7 +129,7 @@ class ReadWriteMutex(object):
self.current_sync_operation = None
# tell everyone to get ready
self.condition.notify_all()
self.condition.notifyAll()
log.debug("%s released write lock", self)
finally:
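# Standalone usage sketch: any number of readers may hold the mutex
# together; the write side is exclusive and waits for readers to drain.
rw = ReadWriteMutex()
rw.acquire_read_lock()
rw.release_read_lock()
rw.acquire_write_lock()
rw.release_write_lock()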
View file
@ -1,203 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Use a cache layer in front of entry point scanning."""
import errno
import glob
import hashlib
import itertools
import json
import logging
import os
import os.path
import struct
import sys
try:
# For python 3.8 and later
import importlib.metadata as importlib_metadata
except ImportError:
# For everyone else
import importlib_metadata
log = logging.getLogger('stevedore._cache')
def _get_cache_dir():
"""Locate a platform-appropriate cache directory to use.
Does not ensure that the cache directory exists.
"""
# Linux, Unix, AIX, etc.
if os.name == 'posix' and sys.platform != 'darwin':
# use ~/.cache if empty OR not set
base_path = os.environ.get("XDG_CACHE_HOME", None) \
or os.path.expanduser('~/.cache')
return os.path.join(base_path, 'python-entrypoints')
# Mac OS
elif sys.platform == 'darwin':
return os.path.expanduser('~/Library/Caches/Python Entry Points')
# Windows (hopefully)
else:
base_path = os.environ.get('LOCALAPPDATA', None) \
or os.path.expanduser('~\\AppData\\Local')
return os.path.join(base_path, 'Python Entry Points')
def _get_mtime(name):
try:
s = os.stat(name)
return s.st_mtime
except OSError as err:
if err.errno != errno.ENOENT:
raise
return -1.0
def _ftobytes(f):
return struct.Struct('f').pack(f)
def _hash_settings_for_path(path):
"""Return a hash and the path settings that created it."""
paths = []
h = hashlib.sha256()
# Tie the cache to the python interpreter, in case it is part of a
# virtualenv.
h.update(sys.executable.encode('utf-8'))
h.update(sys.prefix.encode('utf-8'))
for entry in path:
mtime = _get_mtime(entry)
h.update(entry.encode('utf-8'))
h.update(_ftobytes(mtime))
paths.append((entry, mtime))
for ep_file in itertools.chain(
glob.iglob(os.path.join(entry,
'*.dist-info',
'entry_points.txt')),
glob.iglob(os.path.join(entry,
'*.egg-info',
'entry_points.txt'))
):
mtime = _get_mtime(ep_file)
h.update(ep_file.encode('utf-8'))
h.update(_ftobytes(mtime))
paths.append((ep_file, mtime))
return (h.hexdigest(), paths)
def _build_cacheable_data(path):
real_groups = importlib_metadata.entry_points()
# Convert the namedtuple values to regular tuples
groups = {}
for name, group_data in real_groups.items():
existing = set()
members = []
groups[name] = members
for ep in group_data:
# Filter out duplicates that can occur when testing a
# package that provides entry points using tox, where the
# package is installed in the virtualenv that tox builds
# and is present in the path as '.'.
item = ep.name, ep.value, ep.group # convert to tuple
if item in existing:
continue
existing.add(item)
members.append(item)
return {
'groups': groups,
'sys.executable': sys.executable,
'sys.prefix': sys.prefix,
}
class Cache:
def __init__(self, cache_dir=None):
if cache_dir is None:
cache_dir = _get_cache_dir()
self._dir = cache_dir
self._internal = {}
self._disable_caching = False
# Caching can be disabled either by placing a .disable file in the
# target directory or when the python executable is under /tmp (the
# case when executed from ansible)
if any([os.path.isfile(os.path.join(self._dir, '.disable')),
sys.executable[0:4] == '/tmp']):
self._disable_caching = True
def _get_data_for_path(self, path):
if path is None:
path = sys.path
internal_key = tuple(path)
if internal_key in self._internal:
return self._internal[internal_key]
digest, path_values = _hash_settings_for_path(path)
filename = os.path.join(self._dir, digest)
try:
log.debug('reading %s', filename)
with open(filename, 'r') as f:
data = json.load(f)
except (IOError, json.JSONDecodeError):
data = _build_cacheable_data(path)
data['path_values'] = path_values
if not self._disable_caching:
try:
log.debug('writing to %s', filename)
os.makedirs(self._dir, exist_ok=True)
with open(filename, 'w') as f:
json.dump(data, f)
except (IOError, OSError):
# Could not create cache dir or write file.
pass
self._internal[internal_key] = data
return data
def get_group_all(self, group, path=None):
result = []
data = self._get_data_for_path(path)
group_data = data.get('groups', {}).get(group, [])
for vals in group_data:
result.append(importlib_metadata.EntryPoint(*vals))
return result
def get_group_named(self, group, path=None):
result = {}
for ep in self.get_group_all(group, path=path):
if ep.name not in result:
result[ep.name] = ep
return result
def get_single(self, group, name, path=None):
    for ep_name, ep in self.get_group_named(group, path=path).items():
        if ep_name == name:
            return ep
    raise ValueError('No entrypoint {!r} in group {!r}'.format(
        name, group))
_c = Cache()
get_group_all = _c.get_group_all
get_group_named = _c.get_group_named
get_single = _c.get_single
View file
@ -142,7 +142,7 @@ class NameDispatchExtensionManager(DispatchExtensionManager):
then ignored
:type invoke_on_load: bool
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
View file
@ -10,8 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from .exception import MultipleMatches
from .exception import NoMatches
from .exception import NoMatches, MultipleMatches
from .named import NamedExtensionManager
@ -34,7 +33,7 @@ class DriverManager(NamedExtensionManager):
is True.
:type invoke_kwds: dict
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
@ -86,7 +85,7 @@ class DriverManager(NamedExtensionManager):
and then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will
be called when an entrypoint can not be loaded. The
be called when a entrypoint can not be loaded. The
arguments that will be provided when this is called (when
an entrypoint fails to load) are (manager, entrypoint,
exception)
@ -143,6 +142,7 @@ class DriverManager(NamedExtensionManager):
@property
def driver(self):
"""Returns the driver being used by this manager."""
"""Returns the driver being used by this manager.
"""
ext = self.extensions[0]
return ext.obj if ext.obj else ext.plugin
View file
@ -46,7 +46,7 @@ class EnabledExtensionManager(ExtensionManager):
then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
View file
@ -1,18 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
View file
@ -1,17 +1,3 @@
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from stevedore import driver

View file
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from stevedore import extension

View file
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages
from setuptools import setup
from setuptools import setup, find_packages
setup(
name='stevedore-examples',
View file
@ -1,21 +1,9 @@
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from stevedore.example import base
class Simple(base.FormatterBase):
"""A very basic formatter."""
"""A very basic formatter.
"""
def format(self, data):
"""Format the data and return unicode text.
View file
@ -1,18 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import textwrap
from stevedore.example import base
View file
@ -1,19 +1,4 @@
# Copyright (C) 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages
from setuptools import setup
from setuptools import setup, find_packages
setup(
name='stevedore-examples2',
View file
@ -13,10 +13,11 @@
"""ExtensionManager
"""
import logging
import operator
import pkg_resources
import logging
from . import _cache
from .exception import NoMatches
LOG = logging.getLogger(__name__)
@ -33,7 +34,7 @@ class Extension(object):
:param name: The entry point name.
:type name: str
:param entry_point: The EntryPoint instance returned by
:mod:`entrypoints`.
:mod:`pkg_resources`.
:type entry_point: EntryPoint
:param plugin: The value returned by entry_point.load()
:param obj: The object returned by ``plugin(*args, **kwds)`` if the
@ -47,38 +48,6 @@ class Extension(object):
self.plugin = plugin
self.obj = obj
@property
def module_name(self):
"""The name of the module from which the entry point is loaded.
:return: A string in 'dotted.module' format.
"""
# NOTE: importlib_metadata from PyPI includes this but the
# Python 3.8 standard library does not.
match = self.entry_point.pattern.match(self.entry_point.value)
return match.group('module')
@property
def extras(self):
"""The 'extras' settings for the plugin."""
# NOTE: The underlying package returns re.Match objects for
# some reason. Translate those to the matched strings, which
# seem more useful.
return [
# Python 3.6 returns _sre.SRE_Match objects. Later
# versions of python return re.Match objects. Both types
# have a 'string' attribute containing the text that
# matched the pattern.
getattr(e, 'string', e)
for e in self.entry_point.extras
]
@property
def attr(self):
"""The attribute of the module to be loaded."""
match = self.entry_point.pattern.match(self.entry_point.value)
return match.group('attr')
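# Illustration of the parsing above with the stdlib EntryPoint pattern
# and its named groups:
import importlib.metadata as importlib_metadata

ep = importlib_metadata.EntryPoint(
    'name', 'module.name:attribute.name [extra]', 'group_name')
m = ep.pattern.match(ep.value)
m.group('module')  # -> 'module.name'
m.group('attr')    # -> 'attribute.name'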
@property
def entry_point_target(self):
"""The module and attribute referenced by this extension's entry_point.
@ -86,7 +55,8 @@ class Extension(object):
:return: A string representation of the target of the entry point in
'dotted.module:object' format.
"""
return self.entry_point.value
return '%s:%s' % (self.entry_point.module_name,
self.entry_point.attrs[0])
class ExtensionManager(object):
@ -110,7 +80,7 @@ class ExtensionManager(object):
then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
@ -156,7 +126,7 @@ class ExtensionManager(object):
are logged and then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will
be called when an entrypoint can not be loaded. The
be called when a entrypoint can not be loaded. The
arguments that will be provided when this is called (when
an entrypoint fails to load) are (manager, entrypoint,
exception)
@ -204,7 +174,7 @@ class ExtensionManager(object):
"""
if self.namespace not in self.ENTRY_POINT_CACHE:
eps = list(_cache.get_group_all(self.namespace))
eps = list(pkg_resources.iter_entry_points(self.namespace))
self.ENTRY_POINT_CACHE[self.namespace] = eps
return self.ENTRY_POINT_CACHE[self.namespace]
@ -252,7 +222,7 @@ class ExtensionManager(object):
ep.require()
plugin = ep.resolve()
else:
plugin = ep.load()
plugin = ep.load(require=verify_requirements)
if invoke_on_load:
obj = plugin(*invoke_args, **invoke_kwds)
else:
@ -331,7 +301,8 @@ class ExtensionManager(object):
LOG.exception(err)
def items(self):
"""Return an iterator of tuples of the form (name, extension).
"""
Return an iterator of tuples of the form (name, extension).
This is analogous to the Mapping.items() method.
"""
@ -355,5 +326,6 @@ class ExtensionManager(object):
return self._extensions_by_name[name]
def __contains__(self, name):
"""Return true if name is in list of enabled extensions."""
"""Return true if name is in list of enabled extensions.
"""
return any(extension.name == name for extension in self.extensions)
View file
@ -32,7 +32,7 @@ class HookManager(NamedExtensionManager):
is True.
:type invoke_kwds: dict
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
View file
@ -46,7 +46,7 @@ class NamedExtensionManager(ExtensionManager):
then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will be called when
an entrypoint can not be loaded. The arguments that will be provided
a entrypoint can not be loaded. The arguments that will be provided
when this is called (when an entrypoint fails to load) are
(manager, entrypoint, exception)
:type on_load_failure_callback: function
@ -108,7 +108,7 @@ class NamedExtensionManager(ExtensionManager):
and then ignored
:type propagate_map_exceptions: bool
:param on_load_failure_callback: Callback function that will
be called when an entrypoint can not be loaded. The
be called when a entrypoint can not be loaded. The
arguments that will be provided when this is called (when
an entrypoint fails to load) are (manager, entrypoint,
exception)
View file
@ -34,32 +34,29 @@ def _simple_list(mgr):
doc = _get_docstring(ext.plugin) or '\n'
summary = doc.splitlines()[0].strip()
yield('* %s -- %s' % (ext.name, summary),
ext.module_name)
ext.entry_point.module_name)
def _detailed_list(mgr, over='', under='-', titlecase=False):
for name in sorted(mgr.names()):
ext = mgr[name]
if over:
yield (over * len(ext.name), ext.module_name)
yield (over * len(ext.name), ext.entry_point.module_name)
if titlecase:
yield (ext.name.title(), ext.module_name)
yield (ext.name.title(), ext.entry_point.module_name)
else:
yield (ext.name, ext.module_name)
yield (ext.name, ext.entry_point.module_name)
if under:
yield (under * len(ext.name), ext.module_name)
yield ('\n', ext.module_name)
yield (under * len(ext.name), ext.entry_point.module_name)
yield ('\n', ext.entry_point.module_name)
doc = _get_docstring(ext.plugin)
if doc:
yield (doc, ext.module_name)
yield (doc, ext.entry_point.module_name)
else:
yield (
'.. warning:: No documentation found for {} in {}'.format(
ext.name, ext.entry_point_target,
),
ext.module_name,
)
yield ('\n', ext.module_name)
yield ('.. warning:: No documentation found in %s'
% ext.entry_point,
ext.entry_point.module_name)
yield ('\n', ext.entry_point.module_name)
class ListPluginsDirective(rst.Directive):
@ -82,7 +79,7 @@ class ListPluginsDirective(rst.Directive):
underline_style = self.options.get('underline-style', '=')
def report_load_failure(mgr, ep, err):
LOG.warning(u'Failed to load %s: %s' % (ep.module, err))
LOG.warning(u'Failed to load %s: %s' % (ep.module_name, err))
mgr = extension.ExtensionManager(
namespace,
View file
@ -1,56 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for stevedore._cache
"""
import sys
from unittest import mock
from stevedore import _cache
from stevedore.tests import utils
class TestCache(utils.TestCase):
def test_disable_caching_executable(self):
"""Test caching is disabled if python interpreter is located under /tmp
directory (Ansible)
"""
with mock.patch.object(sys, 'executable', '/tmp/fake'):
sot = _cache.Cache()
self.assertTrue(sot._disable_caching)
def test_disable_caching_file(self):
"""Test caching is disabled if .disable file is present in target
dir
"""
cache_dir = _cache._get_cache_dir()
with mock.patch('os.path.isfile') as mock_path:
mock_path.return_value = True
sot = _cache.Cache()
mock_path.assert_called_with('%s/.disable' % cache_dir)
self.assertTrue(sot._disable_caching)
mock_path.return_value = False
sot = _cache.Cache()
self.assertFalse(sot._disable_caching)
@mock.patch('os.makedirs')
@mock.patch('builtins.open')
def test__get_data_for_path_no_write(self, mock_open, mock_mkdir):
sot = _cache.Cache()
sot._disable_caching = True
mock_open.side_effect = IOError
sot._get_data_for_path('fake')
mock_mkdir.assert_not_called()
View file
@ -10,8 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from stevedore import dispatch
from stevedore.tests import utils
from stevedore import dispatch
def check_dispatch(ep, *args, **kwds):
View file
@ -13,12 +13,7 @@
"""Tests for stevedore.extension
"""
try:
# For python 3.8 and later
import importlib.metadata as importlib_metadata
except ImportError:
# For everyone else
import importlib_metadata
import pkg_resources
from stevedore import driver
from stevedore import exception
@ -73,15 +68,13 @@ class TestCallback(utils.TestCase):
extensions = [
extension.Extension(
'backend',
importlib_metadata.EntryPoint(
'backend', 'pkg1:driver', 'backend'),
pkg_resources.EntryPoint.parse('backend = pkg1:driver'),
'pkg backend',
None,
),
extension.Extension(
'backend',
importlib_metadata.EntryPoint(
'backend', 'pkg2:driver', 'backend'),
pkg_resources.EntryPoint.parse('backend = pkg2:driver'),
'pkg backend',
None,
),
View file
@ -16,13 +16,6 @@
import operator
from unittest import mock
try:
# For python 3.8 and later
import importlib.metadata as importlib_metadata
except ImportError:
# For everyone else
import importlib_metadata
from stevedore import exception
from stevedore import extension
from stevedore.tests import utils
@ -103,13 +96,13 @@ class TestCallback(utils.TestCase):
def test_use_cache(self):
# If we insert something into the cache of entry points,
# the manager should not have to call into entrypoints
# the manager should not have to call into pkg_resources
# to find the plugins.
cache = extension.ExtensionManager.ENTRY_POINT_CACHE
cache['stevedore.test.faux'] = []
with mock.patch('stevedore._cache.get_group_all',
with mock.patch('pkg_resources.iter_entry_points',
side_effect=
AssertionError('called get_group_all')):
AssertionError('called iter_entry_points')):
em = extension.ExtensionManager('stevedore.test.faux')
names = em.names()
self.assertEqual(names, [])
@ -242,48 +235,9 @@ class TestLoadRequirementsOldSetuptools(utils.TestCase):
def test_verify_requirements(self):
self.em._load_one_plugin(self.mock_ep, False, (), {},
verify_requirements=True)
self.mock_ep.load.assert_called_once_with()
self.mock_ep.load.assert_called_once_with(require=True)
def test_no_verify_requirements(self):
self.em._load_one_plugin(self.mock_ep, False, (), {},
verify_requirements=False)
self.mock_ep.load.assert_called_once_with()
class TestExtensionProperties(utils.TestCase):
def setUp(self):
self.ext1 = extension.Extension(
'name',
importlib_metadata.EntryPoint(
'name', 'module.name:attribute.name [extra]', 'group_name',
),
mock.Mock(),
None,
)
self.ext2 = extension.Extension(
'name',
importlib_metadata.EntryPoint(
'name', 'module:attribute', 'group_name',
),
mock.Mock(),
None,
)
def test_module_name(self):
self.assertEqual('module.name', self.ext1.module_name)
self.assertEqual('module', self.ext2.module_name)
def test_extras(self):
self.assertEqual(['[extra]'], self.ext1.extras)
self.assertEqual([], self.ext2.extras)
def test_attr(self):
self.assertEqual('attribute.name', self.ext1.attr)
self.assertEqual('attribute', self.ext2.attr)
def test_entry_point_target(self):
self.assertEqual('module.name:attribute.name [extra]',
self.ext1.entry_point_target)
self.assertEqual('module:attribute',
self.ext2.entry_point_target)
self.mock_ep.load.assert_called_once_with(require=False)
View file
@ -12,26 +12,24 @@
"""Tests for the sphinx extension
"""
try:
# For python 3.8 and later
import importlib.metadata as importlib_metadata
except ImportError:
# For everyone else
import importlib_metadata
from unittest import mock
from stevedore import extension
from stevedore import sphinxext
from stevedore.tests import utils
import pkg_resources
def _make_ext(name, docstring):
def inner():
pass
inner.__doc__ = docstring
m1 = importlib_metadata.EntryPoint(
name, '{}_module:{}'.format(name, name), 'group',
)
m1 = mock.Mock(spec=pkg_resources.EntryPoint)
m1.module_name = '%s_module' % name
s = mock.Mock(return_value='ENTRY_POINT(%s)' % name)
m1.__str__ = s
return extension.Extension(name, m1, inner, None)
@ -113,8 +111,7 @@ class TestSphinxExt(utils.TestCase):
('nodoc', 'nodoc_module'),
('-----', 'nodoc_module'),
('\n', 'nodoc_module'),
(('.. warning:: No documentation found for '
'nodoc in nodoc_module:nodoc'),
('.. warning:: No documentation found in ENTRY_POINT(nodoc)',
'nodoc_module'),
('\n', 'nodoc_module'),
],
View file
@ -10,20 +10,15 @@
# License for the specific language governing permissions and limitations
# under the License.
from unittest.mock import Mock
from unittest.mock import sentinel
from unittest.mock import Mock, sentinel
from stevedore.dispatch import DispatchExtensionManager
from stevedore.dispatch import NameDispatchExtensionManager
from stevedore import (ExtensionManager, NamedExtensionManager, HookManager,
DriverManager, EnabledExtensionManager)
from stevedore.dispatch import (DispatchExtensionManager,
NameDispatchExtensionManager)
from stevedore.extension import Extension
from stevedore.tests import utils
from stevedore import DriverManager
from stevedore import EnabledExtensionManager
from stevedore import ExtensionManager
from stevedore import HookManager
from stevedore import NamedExtensionManager
test_extension = Extension('test_extension', None, None, None)
test_extension2 = Extension('another_one', None, None, None)