Update cloudinary to 1.20.0

This commit is contained in:
JonnyWong16 2020-03-21 19:11:41 -07:00
parent 1c56d9c513
commit 2984629b39
27 changed files with 2865 additions and 923 deletions

0
lib/cloudinary/cache/__init__.py vendored Normal file
View file

View file

View file

@@ -0,0 +1,63 @@
from abc import ABCMeta, abstractmethod
class CacheAdapter:
    """
    CacheAdapter Abstract Base Class

    Defines the contract for cache backends that store values addressed by
    the tuple (public_id, type, resource_type, transformation, format).
    """
    # NOTE(review): `__metaclass__` only takes effect on Python 2; under
    # Python 3 this is an ordinary class attribute and @abstractmethod is
    # not enforced at instantiation time — an incomplete subclass falls
    # through to the NotImplementedError raised below instead.
    __metaclass__ = ABCMeta

    @abstractmethod
    def get(self, public_id, type, resource_type, transformation, format):
        """
        Gets value specified by parameters

        :param public_id: The public ID of the resource
        :param type: The storage type
        :param resource_type: The type of the resource
        :param transformation: The transformation string
        :param format: The format of the resource

        :return: None|mixed value, None if not found
        """
        raise NotImplementedError

    @abstractmethod
    def set(self, public_id, type, resource_type, transformation, format, value):
        """
        Sets value specified by parameters

        :param public_id: The public ID of the resource
        :param type: The storage type
        :param resource_type: The type of the resource
        :param transformation: The transformation string
        :param format: The format of the resource
        :param value: The value to set

        :return: bool True on success or False on failure
        """
        raise NotImplementedError

    @abstractmethod
    def delete(self, public_id, type, resource_type, transformation, format):
        """
        Deletes entry specified by parameters

        :param public_id: The public ID of the resource
        :param type: The storage type
        :param resource_type: The type of the resource
        :param transformation: The transformation string
        :param format: The format of the resource

        :return: bool True on success or False on failure
        """
        raise NotImplementedError

    @abstractmethod
    def flush_all(self):
        """
        Flushes all entries from cache

        :return: bool True on success or False on failure
        """
        raise NotImplementedError

View file

@@ -0,0 +1,61 @@
import json
from hashlib import sha1
from cloudinary.cache.adapter.cache_adapter import CacheAdapter
from cloudinary.cache.storage.key_value_storage import KeyValueStorage
from cloudinary.utils import check_property_enabled
class KeyValueCacheAdapter(CacheAdapter):
    """
    A cache adapter backed by a simple key-value storage.

    Cache parameters are hashed into a single storage key, and values are
    serialized to JSON strings before being handed to the storage backend.
    """

    def __init__(self, storage):
        """Create a new adapter wrapping the given KeyValueStorage instance."""
        if not isinstance(storage, KeyValueStorage):
            raise ValueError("An instance of valid KeyValueStorage must be provided")

        self._key_value_storage = storage

    @property
    def enabled(self):
        """True while a storage backend is attached."""
        return self._key_value_storage is not None

    @check_property_enabled
    def get(self, public_id, type, resource_type, transformation, format):
        """Fetch and JSON-decode the cached value; falsy raw values pass through."""
        cache_key = self.generate_cache_key(public_id, type, resource_type, transformation, format)
        raw_value = self._key_value_storage.get(cache_key)
        if not raw_value:
            return raw_value
        return json.loads(raw_value)

    @check_property_enabled
    def set(self, public_id, type, resource_type, transformation, format, value):
        """JSON-encode value and store it under the generated cache key."""
        cache_key = self.generate_cache_key(public_id, type, resource_type, transformation, format)
        serialized = json.dumps(value)
        return self._key_value_storage.set(cache_key, serialized)

    @check_property_enabled
    def delete(self, public_id, type, resource_type, transformation, format):
        """Remove the entry addressed by the given cache parameters."""
        cache_key = self.generate_cache_key(public_id, type, resource_type, transformation, format)
        return self._key_value_storage.delete(cache_key)

    @check_property_enabled
    def flush_all(self):
        """Drop every entry from the underlying storage."""
        return self._key_value_storage.clear()

    @staticmethod
    def generate_cache_key(public_id, type, resource_type, transformation, format):
        """
        Generates key-value storage key from parameters

        :param public_id: The public ID of the resource
        :param type: The storage type
        :param resource_type: The type of the resource
        :param transformation: The transformation string
        :param format: The format of the resource

        :return: Resulting cache key
        """
        components = (public_id, type, resource_type, transformation, format)
        joined = "/".join(component for component in components if component)
        return sha1(joined.encode("utf-8")).hexdigest()

View file

@@ -0,0 +1,124 @@
import copy
import collections
import cloudinary
from cloudinary.cache.adapter.cache_adapter import CacheAdapter
from cloudinary.utils import check_property_enabled
class ResponsiveBreakpointsCache:
    """
    Caches breakpoint values for image resources
    """
    def __init__(self, **cache_options):
        """
        Initialize the cache

        :param cache_options: Cache configuration options
            (``cache_adapter`` - a CacheAdapter instance)
        """
        # Holds the active CacheAdapter; None means caching is disabled.
        self._cache_adapter = None

        cache_adapter = cache_options.get("cache_adapter")
        self.set_cache_adapter(cache_adapter)

    def set_cache_adapter(self, cache_adapter):
        """
        Assigns cache adapter

        :param cache_adapter: The cache adapter used to store and retrieve values

        :return: Returns True if the cache_adapter is valid
        """
        if cache_adapter is None or not isinstance(cache_adapter, CacheAdapter):
            return False

        self._cache_adapter = cache_adapter

        return True

    @property
    def enabled(self):
        """
        Indicates whether cache is enabled or not

        :return: True if a cache adapter has been set
        """
        return self._cache_adapter is not None

    @staticmethod
    def _options_to_parameters(**options):
        """
        Extract the parameters required in order to calculate the key of the cache.

        :param options: Input options

        :return: A tuple of (storage_type, resource_type, transformation, format)
            used to calculate the cache key
        """
        # Deep-copy first: generate_transformation_string presumably mutates
        # the options it is given — TODO confirm against cloudinary.utils.
        options_copy = copy.deepcopy(options)
        transformation, _ = cloudinary.utils.generate_transformation_string(**options_copy)
        file_format = options.get("format", "")
        storage_type = options.get("type", "upload")
        resource_type = options.get("resource_type", "image")

        return storage_type, resource_type, transformation, file_format

    @check_property_enabled
    def get(self, public_id, **options):
        """
        Retrieve the breakpoints of a particular derived resource identified by the public_id and options

        :param public_id: The public ID of the resource
        :param options: Additional options identifying the derived resource

        :return: Array of responsive breakpoints, None if not found
        """
        params = self._options_to_parameters(**options)

        return self._cache_adapter.get(public_id, *params)

    @check_property_enabled
    def set(self, public_id, value, **options):
        """
        Set responsive breakpoints identified by public ID and options

        :param public_id: The public ID of the resource
        :param value: Array of responsive breakpoints to set
        :param options: Additional options

        :raises ValueError: When value is not a list or tuple

        :return: True on success or False on failure
        """
        if not (isinstance(value, (list, tuple))):
            raise ValueError("A list of breakpoints is expected")

        storage_type, resource_type, transformation, file_format = self._options_to_parameters(**options)

        return self._cache_adapter.set(public_id, storage_type, resource_type, transformation, file_format, value)

    @check_property_enabled
    def delete(self, public_id, **options):
        """
        Delete responsive breakpoints identified by public ID and options

        :param public_id: The public ID of the resource
        :param options: Additional options

        :return: True on success or False on failure
        """
        params = self._options_to_parameters(**options)

        return self._cache_adapter.delete(public_id, *params)

    @check_property_enabled
    def flush_all(self):
        """
        Flush all entries from cache

        :return: True on success or False on failure
        """
        return self._cache_adapter.flush_all()
# Module-level default cache instance; created with no cache_adapter, so it
# starts disabled until an adapter is assigned via set_cache_adapter().
instance = ResponsiveBreakpointsCache()

View file

View file

@@ -0,0 +1,79 @@
import glob
from tempfile import gettempdir
import os
import errno
from cloudinary.cache.storage.key_value_storage import KeyValueStorage
class FileSystemKeyValueStorage(KeyValueStorage):
    """File-based key-value storage.

    Each key is kept as its own file named ``<key>.cldci`` under root_path;
    the file's contents are the stored value.
    """
    # Dedicated extension so clear() removes only files created by this store.
    _item_ext = ".cldci"

    def __init__(self, root_path):
        """
        Create a new Storage object.

        All files will be stored under the root_path location

        :param root_path: The base folder for all storage files.
            When None, the system temp directory is used.
        """
        if root_path is None:
            root_path = gettempdir()

        if not os.path.isdir(root_path):
            try:
                os.makedirs(root_path)
            except OSError as e:
                # Tolerate a concurrent creator winning the race between the
                # isdir() check and makedirs(); re-raise any other failure.
                if e.errno != errno.EEXIST:
                    raise

        self._root_path = root_path

    def get(self, key):
        """
        Get the value associated with key

        :param key: The unique identifier

        :return: str value, or None if the key does not exist
        """
        # EAFP: open directly instead of exists-then-open, which is racy when
        # another process removes the file between the check and the read.
        try:
            with open(self._get_key_full_path(key), 'r') as f:
                return f.read()
        except IOError as e:
            if e.errno != errno.ENOENT:  # re-raise unless "no such file"
                raise
            return None

    def set(self, key, value):
        """
        Store value under key

        :param key: The unique identifier
        :param value: The string value to store

        :return: bool True on success
        """
        with open(self._get_key_full_path(key), 'w') as f:
            f.write(value)

        return True

    def delete(self, key):
        """
        Delete the item identified by key (no-op when it does not exist)

        :param key: The unique identifier

        :return: bool True on success
        """
        try:
            os.remove(self._get_key_full_path(key))
        except OSError as e:
            if e.errno != errno.ENOENT:  # errno.ENOENT - no such file or directory
                raise  # re-raise exception if a different error occurred

        return True

    def clear(self):
        """
        Remove every cache item file under the root path

        :return: bool True on success
        """
        for cache_item_path in glob.iglob(os.path.join(self._root_path, '*' + self._item_ext)):
            os.remove(cache_item_path)

        return True

    def _get_key_full_path(self, key):
        """
        Generate the file path for the key

        :param key: The key

        :return: The absolute path of the value file associated with the key
        """
        return os.path.join(self._root_path, key + self._item_ext)

    def _exists(self, key):
        """
        Indicate whether key exists

        :param key: The key

        :return: bool True if the file for the given key exists
        """
        return os.path.isfile(self._get_key_full_path(key))

View file

@@ -0,0 +1,51 @@
from abc import ABCMeta, abstractmethod
class KeyValueStorage:
    """
    A simple key-value storage abstract base class

    Concrete backends (e.g. file-system storage) implement get/set/delete/clear
    over string keys and string values.
    """
    # NOTE(review): `__metaclass__` only takes effect on Python 2; under
    # Python 3 this is an ordinary class attribute and @abstractmethod is
    # not enforced at instantiation time — an incomplete subclass falls
    # through to the NotImplementedError raised below instead.
    __metaclass__ = ABCMeta

    @abstractmethod
    def get(self, key):
        """
        Get a value identified by the given key

        :param key: The unique identifier

        :return: The value identified by key or None if no value was found
        """
        raise NotImplementedError

    @abstractmethod
    def set(self, key, value):
        """
        Store the value identified by the key

        :param key: The unique identifier
        :param value: Value to store

        :return: bool True on success or False on failure
        """
        raise NotImplementedError

    @abstractmethod
    def delete(self, key):
        """
        Deletes item by key

        :param key: The unique identifier

        :return: bool True on success or False on failure
        """
        raise NotImplementedError

    @abstractmethod
    def clear(self):
        """
        Clears all entries

        :return: bool True on success or False on failure
        """
        raise NotImplementedError