diff --git a/lib/cloudinary/__init__.py b/lib/cloudinary/__init__.py index 19a1e716..5faf1c0b 100644 --- a/lib/cloudinary/__init__.py +++ b/lib/cloudinary/__init__.py @@ -1,29 +1,43 @@ from __future__ import absolute_import +from copy import deepcopy +import os +import re import logging +import numbers +import certifi +from math import ceil +from six import python_2_unicode_compatible + logger = logging.getLogger("Cloudinary") ch = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) logger.addHandler(ch) -import os -import re - -from six import python_2_unicode_compatible - from cloudinary import utils +from cloudinary.exceptions import GeneralError +from cloudinary.cache import responsive_breakpoints_cache +from cloudinary.http_client import HttpClient from cloudinary.compat import urlparse, parse_qs -from cloudinary.search import Search + +from platform import python_version + +CERT_KWARGS = { + 'cert_reqs': 'CERT_REQUIRED', + 'ca_certs': certifi.where(), +} CF_SHARED_CDN = "d3jpl91pxevbkh.cloudfront.net" OLD_AKAMAI_SHARED_CDN = "cloudinary-a.akamaihd.net" AKAMAI_SHARED_CDN = "res.cloudinary.com" SHARED_CDN = AKAMAI_SHARED_CDN CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" +URI_SCHEME = "cloudinary" -VERSION = "1.11.0" -USER_AGENT = "CloudinaryPython/" + VERSION +VERSION = "1.20.0" + +USER_AGENT = "CloudinaryPython/{} (Python {})".format(VERSION, python_version()) """ :const: USER_AGENT """ USER_PLATFORM = "" @@ -39,7 +53,8 @@ The format of the value should be /Version[ (comment)]. def get_user_agent(): - """Provides the `USER_AGENT` string that is passed to the Cloudinary servers. + """ + Provides the `USER_AGENT` string that is passed to the Cloudinary servers. Prepends `USER_PLATFORM` if it is defined. 
:returns: the user agent @@ -54,15 +69,27 @@ def get_user_agent(): def import_django_settings(): try: - import django.conf from django.core.exceptions import ImproperlyConfigured + try: - if 'CLOUDINARY' in dir(django.conf.settings): - return django.conf.settings.CLOUDINARY + from django.conf import settings as _django_settings + + # We can get a situation when Django module is installed in the system, but not initialized, + # which means we are running not in a Django process. + # In this case the following line throws ImproperlyConfigured exception + if 'cloudinary' in _django_settings.INSTALLED_APPS: + from django import get_version as _get_django_version + global USER_PLATFORM + USER_PLATFORM = "Django/{django_version}".format(django_version=_get_django_version()) + + if 'CLOUDINARY' in dir(_django_settings): + return _django_settings.CLOUDINARY else: return None + except ImproperlyConfigured: return None + except ImportError: return None @@ -78,14 +105,18 @@ class Config(object): api_key=os.environ.get("CLOUDINARY_API_KEY"), api_secret=os.environ.get("CLOUDINARY_API_SECRET"), secure_distribution=os.environ.get("CLOUDINARY_SECURE_DISTRIBUTION"), - private_cdn=os.environ.get("CLOUDINARY_PRIVATE_CDN") == 'true' + private_cdn=os.environ.get("CLOUDINARY_PRIVATE_CDN") == 'true', + api_proxy=os.environ.get("CLOUDINARY_API_PROXY"), ) elif os.environ.get("CLOUDINARY_URL"): cloudinary_url = os.environ.get("CLOUDINARY_URL") self._parse_cloudinary_url(cloudinary_url) def _parse_cloudinary_url(self, cloudinary_url): - uri = urlparse(cloudinary_url.replace("cloudinary://", "http://")) + uri = urlparse(cloudinary_url) + if not self._is_url_scheme_valid(uri): + raise ValueError("Invalid CLOUDINARY_URL scheme. 
Expecting to start with 'cloudinary://'") + for k, v in parse_qs(uri.query).items(): if self._is_nested_key(k): self._put_nested_key(k, v) @@ -115,7 +146,7 @@ class Config(object): def _put_nested_key(self, key, value): chain = re.split(r'[\[\]]+', key) - chain = [key for key in chain if key] + chain = [k for k in chain if k] outer = self.__dict__ last_key = chain.pop() for inner_key in chain: @@ -128,7 +159,21 @@ class Config(object): if isinstance(value, list): value = value[0] outer[last_key] = value - + + @staticmethod + def _is_url_scheme_valid(url): + """ + Helper function. Validates url scheme + + :param url: A named tuple containing URL components + + :return: bool True on success or False on failure + """ + if not url.scheme or url.scheme.lower() != URI_SCHEME: + return False + return True + + _config = Config() @@ -143,8 +188,35 @@ def reset_config(): _config = Config() +_http_client = HttpClient() + +# FIXME: circular import issue +from cloudinary.search import Search + + @python_2_unicode_compatible class CloudinaryResource(object): + """ + Recommended sources for video tag + """ + default_video_sources = [ + { + "type": "mp4", + "codecs": "hev1", + "transformations": {"video_codec": "h265"} + }, { + "type": "webm", + "codecs": "vp9", + "transformations": {"video_codec": "vp9"} + }, { + "type": "mp4", + "transformations": {"video_codec": "auto"} + }, { + "type": "webm", + "transformations": {"video_codec": "auto"} + }, + ] + def __init__(self, public_id=None, format=None, version=None, signature=None, url_options=None, metadata=None, type=None, resource_type=None, default_resource_type=None): @@ -174,9 +246,11 @@ class CloudinaryResource(object): return None prep = '' prep = prep + self.resource_type + '/' + self.type + '/' - if self.version: prep = prep + 'v' + str(self.version) + '/' + if self.version: + prep = prep + 'v' + str(self.version) + '/' prep = prep + self.public_id - if self.format: prep = prep + '.' 
+ self.format + if self.format: + prep = prep + '.' + self.format return prep def get_presigned(self): @@ -199,28 +273,283 @@ class CloudinaryResource(object): def build_url(self, **options): return self.__build_url(**options)[0] - def default_poster_options(self, options): + @staticmethod + def default_poster_options(options): options["format"] = options.get("format", "jpg") - def default_source_types(self): + @staticmethod + def default_source_types(): return ['webm', 'mp4', 'ogv'] + @staticmethod + def _validate_srcset_data(srcset_data): + """ + Helper function. Validates srcset_data parameters + + :param srcset_data: A dictionary containing the following keys: + breakpoints A list of breakpoints. + min_width Minimal width of the srcset images + max_width Maximal width of the srcset images. + max_images Number of srcset images to generate. + + :return: bool True on success or False on failure + """ + if not all(k in srcset_data and isinstance(srcset_data[k], numbers.Number) for k in ("min_width", "max_width", + "max_images")): + logger.warning("Either valid (min_width, max_width, max_images)" + + "or breakpoints must be provided to the image srcset attribute") + return False + + if srcset_data["min_width"] > srcset_data["max_width"]: + logger.warning("min_width must be less than max_width") + return False + + if srcset_data["max_images"] <= 0: + logger.warning("max_images must be a positive integer") + return False + + return True + + def _generate_breakpoints(self, srcset_data): + """ + Helper function. Calculates static responsive breakpoints using provided parameters. + + Either the breakpoints or min_width, max_width, max_images must be provided. + + :param srcset_data: A dictionary containing the following keys: + breakpoints A list of breakpoints. + min_width Minimal width of the srcset images + max_width Maximal width of the srcset images. + max_images Number of srcset images to generate. 
+ + :return: A list of breakpoints + + :raises ValueError: In case of invalid or missing parameters + """ + breakpoints = srcset_data.get("breakpoints", list()) + + if breakpoints: + return breakpoints + + if not self._validate_srcset_data(srcset_data): + return None + + min_width, max_width, max_images = srcset_data["min_width"], srcset_data["max_width"], srcset_data["max_images"] + + if max_images == 1: + # if user requested only 1 image in srcset, we return max_width one + min_width = max_width + + step_size = int(ceil(float(max_width - min_width) / (max_images - 1 if max_images > 1 else 1))) + + curr_breakpoint = min_width + + while curr_breakpoint < max_width: + breakpoints.append(curr_breakpoint) + curr_breakpoint += step_size + + breakpoints.append(max_width) + + return breakpoints + + def _fetch_breakpoints(self, srcset_data=None, **options): + """ + Helper function. Retrieves responsive breakpoints list from cloudinary server + + When passing special string to transformation `width` parameter of form `auto:breakpoints{parameters}:json`, + the response contains JSON with data of the responsive breakpoints + + :param srcset_data: A dictionary containing the following keys: + min_width Minimal width of the srcset images + max_width Maximal width of the srcset images + bytes_step Minimal bytes step between images + max_images Number of srcset images to generate + :param options: Additional options + + :return: Resulting breakpoints + """ + if srcset_data is None: + srcset_data = dict() + + min_width = srcset_data.get("min_width", 50) + max_width = srcset_data.get("max_width", 1000) + bytes_step = srcset_data.get("bytes_step", 20000) + max_images = srcset_data.get("max_images", 20) + transformation = srcset_data.get("transformation") + + kbytes_step = int(ceil(float(bytes_step)/1024)) + + breakpoints_width_param = "auto:breakpoints_{min_width}_{max_width}_{kbytes_step}_{max_images}:json".format( + min_width=min_width, max_width=max_width, 
kbytes_step=kbytes_step, max_images=max_images) + breakpoints_url = utils.cloudinary_scaled_url(self.public_id, breakpoints_width_param, transformation, options) + + return _http_client.get_json(breakpoints_url).get("breakpoints", None) + + def _get_or_generate_breakpoints(self, srcset_data, **options): + """ + Helper function. Gets from cache or calculates srcset breakpoints using provided parameters + + :param srcset_data: A dictionary containing the following keys: + breakpoints A list of breakpoints. + min_width Minimal width of the srcset images + max_width Maximal width of the srcset images + max_images Number of srcset images to generate + :param options: Additional options + + :return: Resulting breakpoints + """ + + breakpoints = srcset_data.get("breakpoints") + + if breakpoints: + return breakpoints + + if srcset_data.get("use_cache"): + breakpoints = responsive_breakpoints_cache.instance.get(self.public_id, **options) + if not breakpoints: + try: + breakpoints = self._fetch_breakpoints(srcset_data, **options) + except GeneralError as e: + logger.warning("Failed getting responsive breakpoints: {error}".format(error=e.message)) + + if breakpoints: + responsive_breakpoints_cache.instance.set(self.public_id, breakpoints, **options) + + if not breakpoints: + # Static calculation if cache is not enabled or we failed to fetch breakpoints + breakpoints = self._generate_breakpoints(srcset_data) + + return breakpoints + + def _generate_srcset_attribute(self, breakpoints, transformation=None, **options): + """ + Helper function. Generates srcset attribute value of the HTML img tag. + + :param breakpoints: A list of breakpoints. 
+ :param transformation: Custom transformation + :param options: Additional options + + :return: Resulting srcset attribute value + + :raises ValueError: In case of invalid or missing parameters + """ + if not breakpoints: + return None + + if transformation is None: + transformation = dict() + + return ", ".join(["{0} {1}w".format(utils.cloudinary_scaled_url( + self.public_id, w, transformation, options), w) for w in breakpoints]) + + @staticmethod + def _generate_sizes_attribute(breakpoints): + """ + Helper function. Generates sizes attribute value of the HTML img tag. + + :param breakpoints: A list of breakpoints. + + :return: Resulting 'sizes' attribute value + """ + if not breakpoints: + return None + + return ", ".join("(max-width: {bp}px) {bp}px".format(bp=bp) for bp in breakpoints) + + def _generate_image_responsive_attributes(self, attributes, srcset_data, **options): + """ + Helper function. Generates srcset and sizes attributes of the image tag + + Create both srcset and sizes here to avoid fetching breakpoints twice + + :param attributes: Existing attributes + :param srcset_data: A dictionary containing the following keys: + breakpoints A list of breakpoints. + min_width Minimal width of the srcset images + max_width Maximal width of the srcset images. + max_images Number of srcset images to generate. 
+ :param options: Additional options + + :return: The responsive attributes + """ + responsive_attributes = dict() + + if not srcset_data: + return responsive_attributes + + breakpoints = None + + if "srcset" not in attributes: + breakpoints = self._get_or_generate_breakpoints(srcset_data, **options) + transformation = srcset_data.get("transformation") + srcset_attr = self._generate_srcset_attribute(breakpoints, transformation, **options) + if srcset_attr: + responsive_attributes["srcset"] = srcset_attr + + if "sizes" not in attributes and srcset_data.get("sizes") is True: + if not breakpoints: + breakpoints = self._get_or_generate_breakpoints(srcset_data, **options) + sizes_attr = self._generate_sizes_attribute(breakpoints) + if sizes_attr: + responsive_attributes["sizes"] = sizes_attr + + return responsive_attributes + def image(self, **options): + """ + Generates HTML img tag + + :param options: Additional options + + :return: Resulting img tag + """ if options.get("resource_type", self.resource_type) == "video": self.default_poster_options(options) + + custom_attributes = options.pop("attributes", dict()) + + srcset_option = options.pop("srcset", dict()) + srcset_data = dict() + + if isinstance(srcset_option, dict): + srcset_data = config().srcset or dict() + srcset_data = srcset_data.copy() + srcset_data.update(srcset_option) + else: + if "srcset" not in custom_attributes: + custom_attributes["srcset"] = srcset_option + src, attrs = self.__build_url(**options) + client_hints = attrs.pop("client_hints", config().client_hints) responsive = attrs.pop("responsive", False) hidpi = attrs.pop("hidpi", False) + if (responsive or hidpi) and not client_hints: attrs["data-src"] = src - classes = "cld-responsive" if responsive else "cld-hidpi" - if "class" in attrs: classes += " " + attrs["class"] - attrs["class"] = classes - src = attrs.pop("responsive_placeholder", config().responsive_placeholder) - if src == "blank": src = CL_BLANK - if src: attrs["src"] = src + 
classes = "cld-responsive" if responsive else "cld-hidpi" + if "class" in attrs: + classes += " " + attrs["class"] + attrs["class"] = classes + + src = attrs.pop("responsive_placeholder", config().responsive_placeholder) + if src == "blank": + src = CL_BLANK + + responsive_attrs = self._generate_image_responsive_attributes(custom_attributes, srcset_data, **options) + + if responsive_attrs: + # width and height attributes override srcset behavior, they should be removed from html attributes. + for key in {"width", "height"}: + attrs.pop(key, None) + + attrs.update(responsive_attrs) + # Explicitly provided attributes override options + attrs.update(custom_attributes) + + if src: + attrs["src"] = src return u"".format(utils.html_attrs(attrs)) @@ -228,69 +557,231 @@ class CloudinaryResource(object): self.default_poster_options(options) return self.build_url(**options) - # Creates an HTML video tag for the provided +source+ - # - # ==== Options - # * source_types - Specify which source type the tag should include. defaults to webm, mp4 and ogv. - # * source_transformation - specific transformations to use for a specific source type. - # * poster - override default thumbnail: - # * url: provide an ad hoc url - # * options: with specific poster transformations and/or Cloudinary +:public_id+ - # - # ==== Examples - # CloudinaryResource("mymovie.mp4").video() - # CloudinaryResource("mymovie.mp4").video(source_types = 'webm') - # CloudinaryResource("mymovie.ogv").video(poster = "myspecialplaceholder.jpg") - # CloudinaryResource("mymovie.webm").video(source_types = ['webm', 'mp4'], poster = {'effect': 'sepia'}) - def video(self, **options): - public_id = options.get('public_id', self.public_id) - source = re.sub("\.({0})$".format("|".join(self.default_source_types())), '', public_id) + @staticmethod + def _video_mime_type(video_type, codecs=None): + """ + Helper function for video(), generates video MIME type string from video_type and codecs. 
+ Example: video/mp4; codecs=mp4a.40.2 + :param video_type: mp4, webm, ogg etc. + :param codecs: List or string of codecs. E.g.: "avc1.42E01E" or "avc1.42E01E, mp4a.40.2" or + ["avc1.42E01E", "mp4a.40.2"] + + :return: Resulting mime type + """ + + video_type = 'ogg' if video_type == 'ogv' else video_type + + if not video_type: + return "" + + codecs_str = ", ".join(codecs) if isinstance(codecs, (list, tuple)) else codecs + codecs_attr = "; codecs={codecs_str}".format(codecs_str=codecs_str) if codecs_str else "" + + return "video/{}{}".format(video_type, codecs_attr) + + @staticmethod + def _collect_video_tag_attributes(video_options): + """ + Helper function for video tag, collects remaining options and returns them as attributes + + :param video_options: Remaining options + + :return: Resulting attributes + """ + attributes = video_options.copy() + + if 'html_width' in attributes: + attributes['width'] = attributes.pop('html_width') + if 'html_height' in attributes: + attributes['height'] = attributes.pop('html_height') + + if "poster" in attributes and not attributes["poster"]: + attributes.pop("poster", None) + + return attributes + + def _generate_video_poster_attr(self, source, video_options): + """ + Helper function for video tag, generates video poster URL + + :param source: The public ID of the resource + :param video_options: Additional options + + :return: Resulting video poster URL + """ + if 'poster' not in video_options: + return self.video_thumbnail(public_id=source, **video_options) + + poster_options = video_options['poster'] + + if not isinstance(poster_options, dict): + return poster_options + + if 'public_id' not in poster_options: + return self.video_thumbnail(public_id=source, **poster_options) + + return utils.cloudinary_url(poster_options['public_id'], **poster_options)[0] + + def _populate_video_source_tags(self, source, options): + """ + Helper function for video tag, populates source tags from provided options. 
+ + source_types and sources are mutually exclusive, only one of them can be used. + If both are not provided, source types are used (for backwards compatibility) + + :param source: The public ID of the video + :param options: Additional options + + :return: Resulting source tags (may be empty) + """ + source_tags = [] + + # Consume all relevant options, otherwise they are left and passed as attributes + video_sources = options.pop('sources', []) source_types = options.pop('source_types', []) source_transformation = options.pop('source_transformation', {}) + + if video_sources and isinstance(video_sources, list): + # processing new source structure with codecs + for source_data in video_sources: + transformation = options.copy() + transformation.update(source_data.get("transformations", {})) + source_type = source_data.get("type", '') + src = utils.cloudinary_url(source, format=source_type, **transformation)[0] + codecs = source_data.get("codecs", []) + source_tags.append("".format( + attributes=utils.html_attrs({'src': src, 'type': self._video_mime_type(source_type, codecs)}))) + + return source_tags + + # processing old source_types structure with out codecs + if not source_types: + source_types = self.default_source_types() + + if not isinstance(source_types, (list, tuple)): + return source_tags + + for source_type in source_types: + transformation = options.copy() + transformation.update(source_transformation.get(source_type, {})) + src = utils.cloudinary_url(source, format=source_type, **transformation)[0] + source_tags.append("".format( + attributes=utils.html_attrs({'src': src, 'type': self._video_mime_type(source_type)}))) + + return source_tags + + def video(self, **options): + """ + Creates an HTML video tag for the provided +source+ + + Examples: + CloudinaryResource("mymovie.mp4").video() + CloudinaryResource("mymovie.mp4").video(source_types = 'webm') + CloudinaryResource("mymovie.ogv").video(poster = "myspecialplaceholder.jpg") + 
CloudinaryResource("mymovie.webm").video(source_types = ['webm', 'mp4'], poster = {'effect': 'sepia'}) + + :param options: + * source_types - Specify which source type the tag should include. + defaults to webm, mp4 and ogv. + * sources - Similar to source_types, but may contain codecs list. + source_types and sources are mutually exclusive, only one of + them can be used. If both are not provided, default source types + are used. + * source_transformation - specific transformations to use + for a specific source type. + * poster - override default thumbnail: + * url: provide an ad hoc url + * options: with specific poster transformations and/or Cloudinary +:public_id+ + + :return: Video tag + """ + public_id = options.get('public_id', self.public_id) + source = re.sub(r"\.({0})$".format("|".join(self.default_source_types())), '', public_id) + + custom_attributes = options.pop("attributes", dict()) + fallback = options.pop('fallback_content', '') - options['resource_type'] = options.pop('resource_type', self.resource_type or 'video') - if not source_types: source_types = self.default_source_types() - video_options = options.copy() + # Save source types for a single video source handling (it can be a single type) + source_types = options.get('source_types', "") - if 'poster' in video_options: - poster_options = video_options['poster'] - if isinstance(poster_options, dict): - if 'public_id' in poster_options: - video_options['poster'] = utils.cloudinary_url(poster_options['public_id'], **poster_options)[0] - else: - video_options['poster'] = self.video_thumbnail(public_id=source, **poster_options) - else: - video_options['poster'] = self.video_thumbnail(public_id=source, **options) + poster_options = options.copy() + if "poster" not in custom_attributes: + options["poster"] = self._generate_video_poster_attr(source, poster_options) - if not video_options['poster']: del video_options['poster'] + if "resource_type" not in options: + options["resource_type"] = 
self.resource_type or "video" - nested_source_types = isinstance(source_types, list) and len(source_types) > 1 - if not nested_source_types: + # populate video source tags + source_tags = self._populate_video_source_tags(source, options) + + if not source_tags: source = source + '.' + utils.build_array(source_types)[0] - video_url = utils.cloudinary_url(source, **video_options) - video_options = video_url[1] - if not nested_source_types: - video_options['src'] = video_url[0] - if 'html_width' in video_options: video_options['width'] = video_options.pop('html_width') - if 'html_height' in video_options: video_options['height'] = video_options.pop('html_height') + video_url, video_options = utils.cloudinary_url(source, **options) - sources = "" - if nested_source_types: - for source_type in source_types: - transformation = options.copy() - transformation.update(source_transformation.get(source_type, {})) - src = utils.cloudinary_url(source, format=source_type, **transformation)[0] - video_type = "ogg" if source_type == 'ogv' else source_type - mime_type = "video/" + video_type - sources += "".format(attributes=utils.html_attrs({'src': src, 'type': mime_type})) + if not source_tags: + custom_attributes['src'] = video_url + attributes = self._collect_video_tag_attributes(video_options) + attributes.update(custom_attributes) + + sources_str = ''.join(str(x) for x in source_tags) html = "".format( - attributes=utils.html_attrs(video_options), sources=sources, fallback=fallback) + attributes=utils.html_attrs(attributes), sources=sources_str, fallback=fallback) + return html + @staticmethod + def __generate_media_attr(**media_options): + media_query_conditions = [] + if "min_width" in media_options: + media_query_conditions.append("(min-width: {}px)".format(media_options["min_width"])) + if "max_width" in media_options: + media_query_conditions.append("(max-width: {}px)".format(media_options["max_width"])) + + return " and ".join(media_query_conditions) + + def 
source(self, **options): + attrs = options.get("attributes") or {} + + srcset_data = config().srcset or dict() + srcset_data = srcset_data.copy() + srcset_data.update(options.pop("srcset", dict())) + + responsive_attrs = self._generate_image_responsive_attributes(attrs, srcset_data, **options) + + attrs.update(responsive_attrs) + + # `source` tag under `picture` tag uses `srcset` attribute for both `srcset` and `src` urls + if "srcset" not in attrs: + attrs["srcset"], _ = self.__build_url(**options) + + if "media" not in attrs: + media_attr = self.__generate_media_attr(**(options.get("media", {}))) + if media_attr: + attrs["media"] = media_attr + + return u"".format(utils.html_attrs(attrs)) + + def picture(self, **options): + sub_tags = [] + sources = options.pop("sources") or list() + for source in sources: + curr_options = deepcopy(options) + + if "transformation" in source: + curr_options = utils.chain_transformations(curr_options, source["transformation"]) + + curr_options["media"] = dict((k, source[k]) for k in ['min_width', 'max_width'] if k in source) + + sub_tags.append(self.source(**curr_options)) + + sub_tags.append(self.image(**options)) + + return u"{}".format("".join(sub_tags)) + class CloudinaryImage(CloudinaryResource): def __init__(self, public_id=None, **kwargs): diff --git a/lib/cloudinary/api.py b/lib/cloudinary/api.py index ee92fa0b..5d9da408 100644 --- a/lib/cloudinary/api.py +++ b/lib/cloudinary/api.py @@ -4,28 +4,24 @@ import email.utils import json import socket -import cloudinary -from six import string_types - import urllib3 -import certifi - -from cloudinary import utils +from six import string_types from urllib3.exceptions import HTTPError +import cloudinary +from cloudinary import utils +from cloudinary.exceptions import ( + BadRequest, + AuthorizationRequired, + NotAllowed, + NotFound, + AlreadyExists, + RateLimited, + GeneralError +) + logger = cloudinary.logger -# intentionally one-liners -class Error(Exception): pass -class 
NotFound(Error): pass -class NotAllowed(Error): pass -class AlreadyExists(Error): pass -class RateLimited(Error): pass -class BadRequest(Error): pass -class GeneralError(Error): pass -class AuthorizationRequired(Error): pass - - EXCEPTION_CODES = { 400: BadRequest, 401: AuthorizationRequired, @@ -45,10 +41,8 @@ class Response(dict): self.rate_limit_reset_at = email.utils.parsedate(response.headers["x-featureratelimit-reset"]) self.rate_limit_remaining = int(response.headers["x-featureratelimit-remaining"]) -_http = urllib3.PoolManager( - cert_reqs='CERT_REQUIRED', - ca_certs=certifi.where() - ) + +_http = utils.get_http_connector(cloudinary.config(), cloudinary.CERT_KWARGS) def ping(**options): @@ -67,23 +61,26 @@ def resources(**options): resource_type = options.pop("resource_type", "image") upload_type = options.pop("type", None) uri = ["resources", resource_type] - if upload_type: uri.append(upload_type) - params = only(options, - "next_cursor", "max_results", "prefix", "tags", "context", "moderations", "direction", "start_at") + if upload_type: + uri.append(upload_type) + params = only(options, "next_cursor", "max_results", "prefix", "tags", + "context", "moderations", "direction", "start_at") return call_api("get", uri, params, **options) def resources_by_tag(tag, **options): resource_type = options.pop("resource_type", "image") uri = ["resources", resource_type, "tags", tag] - params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction") + params = only(options, "next_cursor", "max_results", "tags", + "context", "moderations", "direction") return call_api("get", uri, params, **options) def resources_by_moderation(kind, status, **options): resource_type = options.pop("resource_type", "image") uri = ["resources", resource_type, "moderations", kind, status] - params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction") + params = only(options, "next_cursor", "max_results", "tags", + 
"context", "moderations", "direction") return call_api("get", uri, params, **options) @@ -99,7 +96,8 @@ def resource(public_id, **options): resource_type = options.pop("resource_type", "image") upload_type = options.pop("type", "upload") uri = ["resources", resource_type, upload_type, public_id] - params = only(options, "exif", "faces", "colors", "image_metadata", "pages", "phash", "coordinates", "max_results") + params = only(options, "exif", "faces", "colors", "image_metadata", "cinemagraph_analysis", + "pages", "phash", "coordinates", "max_results", "quality_analysis", "derived_next_cursor") return call_api("get", uri, params, **options) @@ -114,9 +112,11 @@ def update(public_id, **options): if "tags" in options: params["tags"] = ",".join(utils.build_array(options["tags"])) if "face_coordinates" in options: - params["face_coordinates"] = utils.encode_double_array(options.get("face_coordinates")) + params["face_coordinates"] = utils.encode_double_array( + options.get("face_coordinates")) if "custom_coordinates" in options: - params["custom_coordinates"] = utils.encode_double_array(options.get("custom_coordinates")) + params["custom_coordinates"] = utils.encode_double_array( + options.get("custom_coordinates")) if "context" in options: params["context"] = utils.encode_context(options.get("context")) if "auto_tagging" in options: @@ -167,8 +167,7 @@ def delete_derived_resources(derived_resource_ids, **options): def delete_derived_by_transformation(public_ids, transformations, resource_type='image', type='upload', invalidate=None, **options): - """ - Delete derived resources of public ids, identified by transformations + """Delete derived resources of public ids, identified by transformations :param public_ids: the base resources :type public_ids: list of str @@ -202,33 +201,49 @@ def tags(**options): def transformations(**options): uri = ["transformations"] - return call_api("get", uri, only(options, "next_cursor", "max_results"), **options) + params = 
only(options, "named", "next_cursor", "max_results") + + return call_api("get", uri, params, **options) def transformation(transformation, **options): - uri = ["transformations", transformation_string(transformation)] - return call_api("get", uri, only(options, "next_cursor", "max_results"), **options) + uri = ["transformations"] + + params = only(options, "next_cursor", "max_results") + params["transformation"] = utils.build_single_eager(transformation) + + return call_api("get", uri, params, **options) def delete_transformation(transformation, **options): - uri = ["transformations", transformation_string(transformation)] - return call_api("delete", uri, {}, **options) + uri = ["transformations"] + + params = {"transformation": utils.build_single_eager(transformation)} + + return call_api("delete", uri, params, **options) -# updates - currently only supported update is the "allowed_for_strict" boolean flag and unsafe_update +# updates - currently only supported update is the "allowed_for_strict" +# boolean flag and unsafe_update def update_transformation(transformation, **options): - uri = ["transformations", transformation_string(transformation)] + uri = ["transformations"] + updates = only(options, "allowed_for_strict") + if "unsafe_update" in options: updates["unsafe_update"] = transformation_string(options.get("unsafe_update")) - if not updates: raise Exception("No updates given") + + updates["transformation"] = utils.build_single_eager(transformation) return call_api("put", uri, updates, **options) def create_transformation(name, definition, **options): - uri = ["transformations", name] - return call_api("post", uri, {"transformation": transformation_string(definition)}, **options) + uri = ["transformations"] + + params = {"name": name, "transformation": utils.build_single_eager(definition)} + + return call_api("post", uri, params, **options) def publish_by_ids(public_ids, **options): @@ -271,7 +286,7 @@ def update_upload_preset(name, **options): uri = 
["upload_presets", name] params = utils.build_upload_params(**options) params = utils.cleanup_params(params) - params.update(only(options, "unsigned", "disallow_public_id")) + params.update(only(options, "unsigned", "disallow_public_id", "live")) return call_api("put", uri, params, **options) @@ -279,16 +294,33 @@ def create_upload_preset(**options): uri = ["upload_presets"] params = utils.build_upload_params(**options) params = utils.cleanup_params(params) - params.update(only(options, "unsigned", "disallow_public_id", "name")) + params.update(only(options, "unsigned", "disallow_public_id", "name", "live")) return call_api("post", uri, params, **options) +def create_folder(path, **options): + return call_api("post", ["folders", path], {}, **options) + + def root_folders(**options): - return call_api("get", ["folders"], {}, **options) + return call_api("get", ["folders"], only(options, "next_cursor", "max_results"), **options) def subfolders(of_folder_path, **options): - return call_api("get", ["folders", of_folder_path], {}, **options) + return call_api("get", ["folders", of_folder_path], only(options, "next_cursor", "max_results"), **options) + + +def delete_folder(path, **options): + """Deletes folder + + Deleted folder must be empty, but can have descendant empty sub folders + + :param path: The folder to delete + :param options: Additional options + + :rtype: Response + """ + return call_api("delete", ["folders", path], {}, **options) def restore(public_ids, **options): @@ -361,29 +393,48 @@ def update_streaming_profile(name, **options): def call_json_api(method, uri, jsonBody, **options): logger.debug(jsonBody) data = json.dumps(jsonBody).encode('utf-8') - return _call_api(method, uri, body=data, headers={'Content-Type': 'application/json'}, **options) + return _call_api(method, uri, body=data, + headers={'Content-Type': 'application/json'}, **options) def call_api(method, uri, params, **options): return _call_api(method, uri, params=params, **options) +def 
call_metadata_api(method, uri, params, **options): + """Private function that assists with performing an API call to the + metadata_fields part of the Admin API + + :param method: The HTTP method. Valid methods: get, post, put, delete + :param uri: REST endpoint of the API (without 'metadata_fields') + :param params: Query/body parameters passed to the method + :param options: Additional options + + :rtype: Response + """ + uri = ["metadata_fields"] + (uri or []) + return call_json_api(method, uri, params, **options) + + def _call_api(method, uri, params=None, body=None, headers=None, **options): prefix = options.pop("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com" cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name) - if not cloud_name: raise Exception("Must supply cloud_name") + if not cloud_name: + raise Exception("Must supply cloud_name") api_key = options.pop("api_key", cloudinary.config().api_key) - if not api_key: raise Exception("Must supply api_key") + if not api_key: + raise Exception("Must supply api_key") api_secret = options.pop("api_secret", cloudinary.config().api_secret) - if not cloud_name: raise Exception("Must supply api_secret") + if not cloud_name: + raise Exception("Must supply api_secret") api_url = "/".join([prefix, "v1_1", cloud_name] + uri) processed_params = None if isinstance(params, dict): processed_params = {} for key, value in params.items(): - if isinstance(value, list): + if isinstance(value, list) or isinstance(value, tuple): value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)} processed_params.update(value_list) elif value: @@ -437,12 +488,166 @@ def transformation_string(transformation): def __prepare_streaming_profile_params(**options): params = only(options, "display_name") if "representations" in options: - representations = [{"transformation": transformation_string(trans)} for trans in options["representations"]] + representations = 
[{"transformation": transformation_string(trans)} + for trans in options["representations"]] params["representations"] = json.dumps(representations) return params + def __delete_resource_params(options, **params): p = dict(transformations=utils.build_eager(options.get('transformations')), **only(options, "keep_original", "next_cursor", "invalidate")) p.update(params) return p + + +def list_metadata_fields(**options): + """Returns a list of all metadata field definitions + + See: `Get metadata fields API reference `_ + + :param options: Additional options + + :rtype: Response + """ + return call_metadata_api("get", [], {}, **options) + + +def metadata_field_by_field_id(field_external_id, **options): + """Gets a metadata field by external id + + See: `Get metadata field by external ID API reference + `_ + + :param field_external_id: The ID of the metadata field to retrieve + :param options: Additional options + + :rtype: Response + """ + uri = [field_external_id] + return call_metadata_api("get", uri, {}, **options) + + +def add_metadata_field(field, **options): + """Creates a new metadata field definition + + See: `Create metadata field API reference `_ + + :param field: The field to add + :param options: Additional options + + :rtype: Response + """ + params = only(field, "type", "external_id", "label", "mandatory", + "default_value", "validation", "datasource") + return call_metadata_api("post", [], params, **options) + + +def update_metadata_field(field_external_id, field, **options): + """Updates a metadata field by external id + + Updates a metadata field definition (partially, no need to pass the entire + object) passed as JSON data. + + See `Generic structure of a metadata field + `_ for details. 
+ + :param field_external_id: The id of the metadata field to update + :param field: The field definition + :param options: Additional options + + :rtype: Response + """ + uri = [field_external_id] + params = only(field, "label", "mandatory", "default_value", "validation") + return call_metadata_api("put", uri, params, **options) + + +def delete_metadata_field(field_external_id, **options): + """Deletes a metadata field definition. + The field should no longer be considered a valid candidate for all other endpoints + + See: `Delete metadata field API reference + `_ + + :param field_external_id: The external id of the field to delete + :param options: Additional options + + :return: An array with a "message" key. "ok" value indicates a successful deletion. + :rtype: Response + """ + uri = [field_external_id] + return call_metadata_api("delete", uri, {}, **options) + + +def delete_datasource_entries(field_external_id, entries_external_id, **options): + """Deletes entries in a metadata field datasource + + Deletes (blocks) the datasource entries for a specified metadata field + definition. Sets the state of the entries to inactive. This is a soft delete, + the entries still exist under the hood and can be activated again with the + restore datasource entries method. + + See: `Delete entries in a metadata field datasource API reference + `_ + + :param field_external_id: The id of the field to update + :param entries_external_id: The ids of all the entries to delete from the + datasource + :param options: Additional options + + :rtype: Response + """ + uri = [field_external_id, "datasource"] + params = {"external_ids": entries_external_id} + return call_metadata_api("delete", uri, params, **options) + + +def update_metadata_field_datasource(field_external_id, entries_external_id, **options): + """Updates a metadata field datasource + + Updates the datasource of a supported field type (currently only enum and set), + passed as JSON data. 
The update is partial: datasource entries with an + existing external_id will be updated and entries with new external_id's (or + without external_id's) will be appended. + + See: `Update a metadata field datasource API reference + `_ + + :param field_external_id: The external id of the field to update + :param entries_external_id: + :param options: Additional options + + :rtype: Response + """ + values = [] + for item in entries_external_id: + external = only(item, "external_id", "value") + if external: + values.append(external) + + uri = [field_external_id, "datasource"] + params = {"values": values} + return call_metadata_api("put", uri, params, **options) + + +def restore_metadata_field_datasource(field_external_id, entries_external_ids, **options): + """Restores entries in a metadata field datasource + + Restores (unblocks) any previously deleted datasource entries for a specified + metadata field definition. + Sets the state of the entries to active. + + See: `Restore entries in a metadata field datasource API reference + `_ + + :param field_external_id: The ID of the metadata field + :param entries_external_ids: An array of IDs of datasource entries to restore + (unblock) + :param options: Additional options + + :rtype: Response + """ + uri = [field_external_id, 'datasource_restore'] + params = {"external_ids": entries_external_ids} + return call_metadata_api("post", uri, params, **options) diff --git a/lib/cloudinary/auth_token.py b/lib/cloudinary/auth_token.py index 72fc341e..6ef3874c 100644 --- a/lib/cloudinary/auth_token.py +++ b/lib/cloudinary/auth_token.py @@ -3,33 +3,37 @@ import hmac import re import time from binascii import a2b_hex -from cloudinary.compat import quote_plus + AUTH_TOKEN_NAME = "__cld_token__" +AUTH_TOKEN_SEPARATOR = "~" +AUTH_TOKEN_UNSAFE_RE = r'([ "#%&\'\/:;<=>?@\[\\\]^`{\|}~]+)' - -def generate(url=None, acl=None, start_time=None, duration=None, expiration=None, ip=None, key=None, - token_name=AUTH_TOKEN_NAME): +def 
generate(url=None, acl=None, start_time=None, duration=None, + expiration=None, ip=None, key=None, token_name=AUTH_TOKEN_NAME): if expiration is None: if duration is not None: - start = start_time if start_time is not None else int(time.mktime(time.gmtime())) + start = start_time if start_time is not None else int(time.time()) expiration = start + duration else: raise Exception("Must provide either expiration or duration") token_parts = [] - if ip is not None: token_parts.append("ip=" + ip) - if start_time is not None: token_parts.append("st=%d" % start_time) + if ip is not None: + token_parts.append("ip=" + ip) + if start_time is not None: + token_parts.append("st=%d" % start_time) token_parts.append("exp=%d" % expiration) - if acl is not None: token_parts.append("acl=%s" % _escape_to_lower(acl)) + if acl is not None: + token_parts.append("acl=%s" % _escape_to_lower(acl)) to_sign = list(token_parts) - if url is not None: + if url is not None and acl is None: to_sign.append("url=%s" % _escape_to_lower(url)) - auth = _digest("~".join(to_sign), key) + auth = _digest(AUTH_TOKEN_SEPARATOR.join(to_sign), key) token_parts.append("hmac=%s" % auth) - return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": "~".join(token_parts)} + return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": AUTH_TOKEN_SEPARATOR.join(token_parts)} def _digest(message, key): @@ -38,10 +42,8 @@ def _digest(message, key): def _escape_to_lower(url): - escaped_url = quote_plus(url) - - def toLowercase(match): - return match.group(0).lower() - - escaped_url = re.sub(r'%..', toLowercase, escaped_url) + # There is a circular import issue in this file, need to resolve it in the next major release + from cloudinary.utils import smart_escape + escaped_url = smart_escape(url, unsafe=AUTH_TOKEN_UNSAFE_RE) + escaped_url = re.sub(r"%[0-9A-F]{2}", lambda x: x.group(0).lower(), escaped_url) return escaped_url diff --git a/lib/cloudinary/cache/__init__.py 
b/lib/cloudinary/cache/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/cloudinary/cache/adapter/__init__.py b/lib/cloudinary/cache/adapter/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/cloudinary/cache/adapter/cache_adapter.py b/lib/cloudinary/cache/adapter/cache_adapter.py new file mode 100644 index 00000000..a8907d7c --- /dev/null +++ b/lib/cloudinary/cache/adapter/cache_adapter.py @@ -0,0 +1,63 @@ +from abc import ABCMeta, abstractmethod + + +class CacheAdapter: + """ + CacheAdapter Abstract Base Class + """ + __metaclass__ = ABCMeta + + @abstractmethod + def get(self, public_id, type, resource_type, transformation, format): + """ + Gets value specified by parameters + + :param public_id: The public ID of the resource + :param type: The storage type + :param resource_type: The type of the resource + :param transformation: The transformation string + :param format: The format of the resource + + :return: None|mixed value, None if not found + """ + raise NotImplementedError + + @abstractmethod + def set(self, public_id, type, resource_type, transformation, format, value): + """ + Sets value specified by parameters + + :param public_id: The public ID of the resource + :param type: The storage type + :param resource_type: The type of the resource + :param transformation: The transformation string + :param format: The format of the resource + :param value: The value to set + + :return: bool True on success or False on failure + """ + raise NotImplementedError + + @abstractmethod + def delete(self, public_id, type, resource_type, transformation, format): + """ + Deletes entry specified by parameters + + :param public_id: The public ID of the resource + :param type: The storage type + :param resource_type: The type of the resource + :param transformation: The transformation string + :param format: The format of the resource + + :return: bool True on success or False on failure + """ + raise NotImplementedError + + 
@abstractmethod + def flush_all(self): + """ + Flushes all entries from cache + + :return: bool True on success or False on failure + """ + raise NotImplementedError diff --git a/lib/cloudinary/cache/adapter/key_value_cache_adapter.py b/lib/cloudinary/cache/adapter/key_value_cache_adapter.py new file mode 100644 index 00000000..5b74bf07 --- /dev/null +++ b/lib/cloudinary/cache/adapter/key_value_cache_adapter.py @@ -0,0 +1,61 @@ +import json +from hashlib import sha1 + +from cloudinary.cache.adapter.cache_adapter import CacheAdapter +from cloudinary.cache.storage.key_value_storage import KeyValueStorage +from cloudinary.utils import check_property_enabled + + +class KeyValueCacheAdapter(CacheAdapter): + """ + A cache adapter for a key-value storage type + """ + def __init__(self, storage): + """Create a new adapter for the provided storage interface""" + if not isinstance(storage, KeyValueStorage): + raise ValueError("An instance of valid KeyValueStorage must be provided") + + self._key_value_storage = storage + + @property + def enabled(self): + return self._key_value_storage is not None + + @check_property_enabled + def get(self, public_id, type, resource_type, transformation, format): + key = self.generate_cache_key(public_id, type, resource_type, transformation, format) + value_str = self._key_value_storage.get(key) + return json.loads(value_str) if value_str else value_str + + @check_property_enabled + def set(self, public_id, type, resource_type, transformation, format, value): + key = self.generate_cache_key(public_id, type, resource_type, transformation, format) + return self._key_value_storage.set(key, json.dumps(value)) + + @check_property_enabled + def delete(self, public_id, type, resource_type, transformation, format): + return self._key_value_storage.delete( + self.generate_cache_key(public_id, type, resource_type, transformation, format) + ) + + @check_property_enabled + def flush_all(self): + return self._key_value_storage.clear() + + @staticmethod + 
def generate_cache_key(public_id, type, resource_type, transformation, format): + """ + Generates key-value storage key from parameters + + :param public_id: The public ID of the resource + :param type: The storage type + :param resource_type: The type of the resource + :param transformation: The transformation string + :param format: The format of the resource + + :return: Resulting cache key + """ + + valid_params = [p for p in [public_id, type, resource_type, transformation, format] if p] + + return sha1("/".join(valid_params).encode("utf-8")).hexdigest() diff --git a/lib/cloudinary/cache/responsive_breakpoints_cache.py b/lib/cloudinary/cache/responsive_breakpoints_cache.py new file mode 100644 index 00000000..ac2c2d57 --- /dev/null +++ b/lib/cloudinary/cache/responsive_breakpoints_cache.py @@ -0,0 +1,124 @@ +import copy + +import collections + +import cloudinary +from cloudinary.cache.adapter.cache_adapter import CacheAdapter +from cloudinary.utils import check_property_enabled + + +class ResponsiveBreakpointsCache: + """ + Caches breakpoint values for image resources + """ + def __init__(self, **cache_options): + """ + Initialize the cache + + :param cache_options: Cache configuration options + """ + + self._cache_adapter = None + + cache_adapter = cache_options.get("cache_adapter") + + self.set_cache_adapter(cache_adapter) + + def set_cache_adapter(self, cache_adapter): + """ + Assigns cache adapter + + :param cache_adapter: The cache adapter used to store and retrieve values + + :return: Returns True if the cache_adapter is valid + """ + if cache_adapter is None or not isinstance(cache_adapter, CacheAdapter): + return False + + self._cache_adapter = cache_adapter + + return True + + @property + def enabled(self): + """ + Indicates whether cache is enabled or not + + :return: True if a _cache_adapter has been set + """ + return self._cache_adapter is not None + + @staticmethod + def _options_to_parameters(**options): + """ + Extract the parameters required in
order to calculate the key of the cache. + + :param options: Input options + + :return: A list of values used to calculate the cache key + """ + options_copy = copy.deepcopy(options) + transformation, _ = cloudinary.utils.generate_transformation_string(**options_copy) + file_format = options.get("format", "") + storage_type = options.get("type", "upload") + resource_type = options.get("resource_type", "image") + + return storage_type, resource_type, transformation, file_format + + @check_property_enabled + def get(self, public_id, **options): + """ + Retrieve the breakpoints of a particular derived resource identified by the public_id and options + + :param public_id: The public ID of the resource + :param options: Additional options + + :return: Array of responsive breakpoints, None if not found + """ + params = self._options_to_parameters(**options) + + return self._cache_adapter.get(public_id, *params) + + @check_property_enabled + def set(self, public_id, value, **options): + """ + Set responsive breakpoints identified by public ID and options + + :param public_id: The public ID of the resource + :param value: Array of responsive breakpoints to set + :param options: Additional options + + :return: True on success or False on failure + """ + if not (isinstance(value, (list, tuple))): + raise ValueError("A list of breakpoints is expected") + + storage_type, resource_type, transformation, file_format = self._options_to_parameters(**options) + + return self._cache_adapter.set(public_id, storage_type, resource_type, transformation, file_format, value) + + @check_property_enabled + def delete(self, public_id, **options): + """ + Delete responsive breakpoints identified by public ID and options + + :param public_id: The public ID of the resource + :param options: Additional options + + :return: True on success or False on failure + """ + params = self._options_to_parameters(**options) + + return self._cache_adapter.delete(public_id, *params) + 
@check_property_enabled + def flush_all(self): + """ + Flush all entries from cache + + :return: True on success or False on failure + """ + return self._cache_adapter.flush_all() + + +instance = ResponsiveBreakpointsCache() diff --git a/lib/cloudinary/cache/storage/__init__.py b/lib/cloudinary/cache/storage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/cloudinary/cache/storage/file_system_key_value_storage.py b/lib/cloudinary/cache/storage/file_system_key_value_storage.py new file mode 100644 index 00000000..662d1aff --- /dev/null +++ b/lib/cloudinary/cache/storage/file_system_key_value_storage.py @@ -0,0 +1,79 @@ +import glob +from tempfile import gettempdir + +import os + +import errno + +from cloudinary.cache.storage.key_value_storage import KeyValueStorage + + +class FileSystemKeyValueStorage(KeyValueStorage): + """File-based key-value storage""" + _item_ext = ".cldci" + + def __init__(self, root_path): + """ + Create a new Storage object. + + All files will be stored under the root_path location + + :param root_path: The base folder for all storage files + """ + if root_path is None: + root_path = gettempdir() + + if not os.path.isdir(root_path): + os.makedirs(root_path) + + self._root_path = root_path + + def get(self, key): + if not self._exists(key): + return None + + with open(self._get_key_full_path(key), 'r') as f: + value = f.read() + + return value + + def set(self, key, value): + with open(self._get_key_full_path(key), 'w') as f: + f.write(value) + + return True + + def delete(self, key): + try: + os.remove(self._get_key_full_path(key)) + except OSError as e: + if e.errno != errno.ENOENT: # errno.ENOENT - no such file or directory + raise # re-raise exception if a different error occurred + + return True + + def clear(self): + for cache_item_path in glob.iglob(os.path.join(self._root_path, '*' + self._item_ext)): + os.remove(cache_item_path) + + return True + + def _get_key_full_path(self, key): + """ + Generate the file 
path for the key + + :param key: The key + + :return: The absolute path of the value file associated with the key + """ + return os.path.join(self._root_path, key + self._item_ext) + + def _exists(self, key): + """ + Indicate whether key exists + + :param key: The key + + :return: bool True if the file for the given key exists + """ + return os.path.isfile(self._get_key_full_path(key)) diff --git a/lib/cloudinary/cache/storage/key_value_storage.py b/lib/cloudinary/cache/storage/key_value_storage.py new file mode 100644 index 00000000..2a4d07d8 --- /dev/null +++ b/lib/cloudinary/cache/storage/key_value_storage.py @@ -0,0 +1,51 @@ +from abc import ABCMeta, abstractmethod + + +class KeyValueStorage: + """ + A simple key-value storage abstract base class + """ + __metaclass__ = ABCMeta + + @abstractmethod + def get(self, key): + """ + Get a value identified by the given key + + :param key: The unique identifier + + :return: The value identified by key or None if no value was found + """ + raise NotImplementedError + + @abstractmethod + def set(self, key, value): + """ + Store the value identified by the key + + :param key: The unique identifier + :param value: Value to store + + :return: bool True on success or False on failure + """ + raise NotImplementedError + + @abstractmethod + def delete(self, key): + """ + Deletes item by key + + :param key: The unique identifier + + :return: bool True on success or False on failure + """ + raise NotImplementedError + + @abstractmethod + def clear(self): + """ + Clears all entries + + :return: bool True on success or False on failure + """ + raise NotImplementedError diff --git a/lib/cloudinary/compat.py b/lib/cloudinary/compat.py index 430134ea..5b2b0689 100644 --- a/lib/cloudinary/compat.py +++ b/lib/cloudinary/compat.py @@ -1,5 +1,7 @@ # Copyright Cloudinary import six.moves.urllib.parse +from six import PY3, string_types, StringIO, BytesIO + urlencode = six.moves.urllib.parse.urlencode unquote = 
six.moves.urllib.parse.unquote urlparse = six.moves.urllib.parse.urlparse @@ -7,7 +9,6 @@ parse_qs = six.moves.urllib.parse.parse_qs parse_qsl = six.moves.urllib.parse.parse_qsl quote_plus = six.moves.urllib.parse.quote_plus httplib = six.moves.http_client -from six import PY3, string_types, StringIO, BytesIO urllib2 = six.moves.urllib.request NotConnected = six.moves.http_client.NotConnected diff --git a/lib/cloudinary/exceptions.py b/lib/cloudinary/exceptions.py new file mode 100644 index 00000000..6ed033bc --- /dev/null +++ b/lib/cloudinary/exceptions.py @@ -0,0 +1,33 @@ +# Copyright Cloudinary + + +class Error(Exception): + pass + + +class NotFound(Error): + pass + + +class NotAllowed(Error): + pass + + +class AlreadyExists(Error): + pass + + +class RateLimited(Error): + pass + + +class BadRequest(Error): + pass + + +class GeneralError(Error): + pass + + +class AuthorizationRequired(Error): + pass diff --git a/lib/cloudinary/forms.py b/lib/cloudinary/forms.py index 3465889a..ff83a98f 100644 --- a/lib/cloudinary/forms.py +++ b/lib/cloudinary/forms.py @@ -1,9 +1,10 @@ -from django import forms -from cloudinary import CloudinaryResource +import json +import re + import cloudinary.uploader import cloudinary.utils -import re -import json +from cloudinary import CloudinaryResource +from django import forms from django.utils.translation import ugettext_lazy as _ @@ -16,8 +17,8 @@ def cl_init_js_callbacks(form, request): class CloudinaryInput(forms.TextInput): input_type = 'file' - def render(self, name, value, attrs=None): - attrs = self.build_attrs(attrs) + def render(self, name, value, attrs=None, renderer=None): + attrs = dict(self.attrs, **attrs) options = attrs.get('options', {}) attrs["options"] = '' @@ -27,14 +28,16 @@ class CloudinaryInput(forms.TextInput): else: params = cloudinary.utils.sign_request(params, options) - if 'resource_type' not in options: options['resource_type'] = 'auto' + if 'resource_type' not in options: + options['resource_type'] = 'auto' 
cloudinary_upload_url = cloudinary.utils.cloudinary_api_url("upload", **options) attrs["data-url"] = cloudinary_upload_url attrs["data-form-data"] = json.dumps(params) attrs["data-cloudinary-field"] = name chunk_size = options.get("chunk_size", None) - if chunk_size: attrs["data-max-chunk-size"] = chunk_size + if chunk_size: + attrs["data-max-chunk-size"] = chunk_size attrs["class"] = " ".join(["cloudinary-fileupload", attrs.get("class", "")]) widget = super(CloudinaryInput, self).render("file", None, attrs=attrs) @@ -53,8 +56,10 @@ class CloudinaryJsFileField(forms.Field): } def __init__(self, attrs=None, options=None, autosave=True, *args, **kwargs): - if attrs is None: attrs = {} - if options is None: options = {} + if attrs is None: + attrs = {} + if options is None: + options = {} self.autosave = autosave attrs = attrs.copy() attrs["options"] = options.copy() @@ -70,7 +75,8 @@ class CloudinaryJsFileField(forms.Field): def to_python(self, value): """Convert to CloudinaryResource""" - if not value: return None + if not value: + return None m = re.search(r'^([^/]+)/([^/]+)/v(\d+)/([^#]+)#([^/]+)$', value) if not m: raise forms.ValidationError("Invalid format") @@ -95,7 +101,8 @@ class CloudinaryJsFileField(forms.Field): """Validate the signature""" # Use the parent's handling of required fields, etc. 
super(CloudinaryJsFileField, self).validate(value) - if not value: return + if not value: + return if not value.validate(): raise forms.ValidationError("Signature mismatch") @@ -108,7 +115,8 @@ class CloudinaryUnsignedJsFileField(CloudinaryJsFileField): options = {} options = options.copy() options.update({"unsigned": True, "upload_preset": upload_preset}) - super(CloudinaryUnsignedJsFileField, self).__init__(attrs, options, autosave, *args, **kwargs) + super(CloudinaryUnsignedJsFileField, self).__init__( + attrs, options, autosave, *args, **kwargs) class CloudinaryFileField(forms.FileField): @@ -117,7 +125,7 @@ class CloudinaryFileField(forms.FileField): } default_error_messages = forms.FileField.default_error_messages.copy() default_error_messages.update(my_default_error_messages) - + def __init__(self, options=None, autosave=True, *args, **kwargs): self.autosave = autosave self.options = options or {} diff --git a/lib/cloudinary/http_client.py b/lib/cloudinary/http_client.py new file mode 100644 index 00000000..4355b017 --- /dev/null +++ b/lib/cloudinary/http_client.py @@ -0,0 +1,43 @@ +import json +import socket + +import certifi +from urllib3 import PoolManager +from urllib3.exceptions import HTTPError + +from cloudinary.exceptions import GeneralError + + +class HttpClient: + DEFAULT_HTTP_TIMEOUT = 60 + + def __init__(self, **options): + # Lazy initialization of the client, to improve performance when HttpClient is initialized but not used + self._http_client_instance = None + self.timeout = options.get("timeout", self.DEFAULT_HTTP_TIMEOUT) + + @property + def _http_client(self): + if self._http_client_instance is None: + self._http_client_instance = PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where()) + return self._http_client_instance + + def get_json(self, url): + try: + response = self._http_client.request("GET", url, timeout=self.timeout) + body = response.data + except HTTPError as e: + raise GeneralError("Unexpected error %s" % str(e)) + 
except socket.error as e: + raise GeneralError("Socket Error: %s" % str(e)) + + if response.status != 200: + raise GeneralError("Server returned unexpected status code - {} - {}".format(response.status, + response.data)) + try: + result = json.loads(body.decode('utf-8')) + except Exception as e: + # Error parsing json + raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e)) + + return result diff --git a/lib/cloudinary/models.py b/lib/cloudinary/models.py index 9f15383d..a3c7591c 100644 --- a/lib/cloudinary/models.py +++ b/lib/cloudinary/models.py @@ -1,10 +1,10 @@ import re - from cloudinary import CloudinaryResource, forms, uploader - from django.core.files.uploadedfile import UploadedFile from django.db import models +from cloudinary.uploader import upload_options +from cloudinary.utils import upload_params # Add introspection rules for South, if it's installed. try: @@ -13,15 +13,23 @@ try: except ImportError: pass -CLOUDINARY_FIELD_DB_RE = r'(?:(?Pimage|raw|video)/(?Pupload|private|authenticated)/)?(?:v(?P\d+)/)?(?P.*?)(\.(?P[^.]+))?$' +CLOUDINARY_FIELD_DB_RE = r'(?:(?Pimage|raw|video)/' \ + r'(?Pupload|private|authenticated)/)?' \ + r'(?:v(?P\d+)/)?' \ + r'(?P.*?)' \ + r'(\.(?P[^.]+))?$' -# Taken from six - https://pythonhosted.org/six/ def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. + """ + Create a base class with a metaclass. + + This requires a bit of explanation: the basic idea is to make a dummy + metaclass for one level of class instantiation that replaces itself with + the actual metaclass.
+ + Taken from six - https://pythonhosted.org/six/ + """ class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) @@ -32,23 +40,32 @@ class CloudinaryField(models.Field): description = "A resource stored in Cloudinary" def __init__(self, *args, **kwargs): - options = {'max_length': 255} self.default_form_class = kwargs.pop("default_form_class", forms.CloudinaryFileField) - options.update(kwargs) - self.type = options.pop("type", "upload") - self.resource_type = options.pop("resource_type", "image") - self.width_field = options.pop("width_field", None) - self.height_field = options.pop("height_field", None) - super(CloudinaryField, self).__init__(*args, **options) + self.type = kwargs.pop("type", "upload") + self.resource_type = kwargs.pop("resource_type", "image") + self.width_field = kwargs.pop("width_field", None) + self.height_field = kwargs.pop("height_field", None) + # Collect all options related to Cloudinary upload + self.options = {key: kwargs.pop(key) for key in set(kwargs.keys()) if key in upload_params + upload_options} + + field_options = kwargs + field_options['max_length'] = 255 + super(CloudinaryField, self).__init__(*args, **field_options) def get_internal_type(self): return 'CharField' def value_to_string(self, obj): - # We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods. - # It would be better to wrap it with try -> except AttributeError -> fallback to legacy. - # Unfortunately, we can catch AttributeError exception from `value_from_object` function itself. - # Parsing exception string is an overkill here, that's why we check for attribute existence + """ + We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods. + It would be better to wrap it with try -> except AttributeError -> fallback to legacy. + Unfortunately, we can catch AttributeError exception from `value_from_object` function itself. 
+ Parsing exception string is an overkill here, that's why we check for attribute existence + + :param obj: Value to serialize + + :return: Serialized value + """ if hasattr(self, 'value_from_object'): value = self.value_from_object(obj) @@ -69,38 +86,33 @@ class CloudinaryField(models.Field): format=m.group('format') ) - def from_db_value(self, value, expression, connection, context): - if value is None: - return value - return self.parse_cloudinary_resource(value) + def from_db_value(self, value, expression, connection, *args, **kwargs): + # TODO: when dropping support for versions prior to 2.0, you may return + # the signature to from_db_value(value, expression, connection) + if value is not None: + return self.parse_cloudinary_resource(value) def to_python(self, value): if isinstance(value, CloudinaryResource): return value elif isinstance(value, UploadedFile): return value - elif value is None: + elif value is None or value is False: return value else: return self.parse_cloudinary_resource(value) - def upload_options_with_filename(self, model_instance, filename): - return self.upload_options(model_instance) - - def upload_options(self, model_instance): - return {} - def pre_save(self, model_instance, add): value = super(CloudinaryField, self).pre_save(model_instance, add) if isinstance(value, UploadedFile): options = {"type": self.type, "resource_type": self.resource_type} - options.update(self.upload_options_with_filename(model_instance, value.name)) + options.update(self.options) instance_value = uploader.upload_resource(value, **options) setattr(model_instance, self.attname, instance_value) if self.width_field: - setattr(model_instance, self.width_field, instance_value.metadata['width']) + setattr(model_instance, self.width_field, instance_value.metadata.get('width')) if self.height_field: - setattr(model_instance, self.height_field, instance_value.metadata['height']) + setattr(model_instance, self.height_field, instance_value.metadata.get('height')) return 
self.get_prep_value(instance_value) else: return value diff --git a/lib/cloudinary/poster/__init__.py b/lib/cloudinary/poster/__init__.py index 9110fa42..9359a53d 100644 --- a/lib/cloudinary/poster/__init__.py +++ b/lib/cloudinary/poster/__init__.py @@ -1,17 +1,17 @@ # MIT licensed code copied from https://bitbucket.org/chrisatlee/poster # # Copyright (c) 2011 Chris AtLee -# +# # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: -# +# # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. -# +# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE @@ -28,7 +28,4 @@ New releases of poster will always have a version number that compares greater than an older version of poster. New in version 0.6.""" -import cloudinary.poster.streaminghttp -import cloudinary.poster.encode - -version = (0, 8, 2) # Thanks JP! +version = (0, 8, 2) # Thanks JP! diff --git a/lib/cloudinary/poster/encode.py b/lib/cloudinary/poster/encode.py index 4900eee4..90ef3069 100644 --- a/lib/cloudinary/poster/encode.py +++ b/lib/cloudinary/poster/encode.py @@ -6,9 +6,17 @@ as multipart/form-data suitable for a HTTP POST or PUT request. 
multipart/form-data is the standard way to upload files over HTTP""" -__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam', - 'encode_string', 'encode_file_header', 'get_body_size', 'get_headers', - 'multipart_encode'] +import mimetypes +import os +import re +from email.header import Header + +from cloudinary.compat import (PY3, advance_iterator, quote_plus, to_bytearray, + to_bytes, to_string) + +__all__ = [ + 'gen_boundary', 'encode_and_quote', 'MultipartParam', 'encode_string', + 'encode_file_header', 'get_body_size', 'get_headers', 'multipart_encode'] try: from io import UnsupportedOperation @@ -17,25 +25,19 @@ except ImportError: try: import uuid + def gen_boundary(): """Returns a random string to use as the boundary for a message""" return uuid.uuid4().hex except ImportError: - import random, sha + import random + import sha + def gen_boundary(): """Returns a random string to use as the boundary for a message""" bits = random.getrandbits(160) return sha.new(str(bits)).hexdigest() -import re, os, mimetypes -from cloudinary.compat import (PY3, string_types, to_bytes, to_string, - to_bytearray, quote_plus, advance_iterator) -try: - from email.header import Header -except ImportError: - # Python 2.4 - from email.Header import Header - if PY3: def encode_and_quote(data): if data is None: @@ -47,7 +49,7 @@ else: """If ``data`` is unicode, return quote_plus(data.encode("utf-8")) otherwise return quote_plus(data)""" if data is None: return None - + if isinstance(data, unicode): data = data.encode("utf-8") return quote_plus(data) @@ -65,13 +67,15 @@ if PY3: return to_bytes(str(s)) else: def _strify(s): - """If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None""" + """If s is a unicode string, encode it to UTF-8 and return the results, + otherwise return str(s), or None if s is None""" if s is None: return None if isinstance(s, unicode): return s.encode("utf-8") return str(s) + class 
MultipartParam(object): """Represents a single parameter in a multipart/form-data request @@ -105,7 +109,7 @@ class MultipartParam(object): transferred, and the total size. """ def __init__(self, name, value=None, filename=None, filetype=None, - filesize=None, fileobj=None, cb=None): + filesize=None, fileobj=None, cb=None): self.name = Header(name).encode() self.value = _strify(value) if filename is None: @@ -141,7 +145,7 @@ class MultipartParam(object): fileobj.seek(0, 2) self.filesize = fileobj.tell() fileobj.seek(0) - except: + except Exception: raise ValueError("Could not determine filesize") def __cmp__(self, other): @@ -169,9 +173,9 @@ class MultipartParam(object): """ return cls(paramname, filename=os.path.basename(filename), - filetype=mimetypes.guess_type(filename)[0], - filesize=os.path.getsize(filename), - fileobj=open(filename, "rb")) + filetype=mimetypes.guess_type(filename)[0], + filesize=os.path.getsize(filename), + fileobj=open(filename, "rb")) @classmethod def from_params(cls, params): @@ -204,7 +208,7 @@ class MultipartParam(object): filetype = None retval.append(cls(name=name, filename=filename, - filetype=filetype, fileobj=value)) + filetype=filetype, fileobj=value)) else: retval.append(cls(name, value)) return retval @@ -216,8 +220,8 @@ class MultipartParam(object): headers = ["--%s" % boundary] if self.filename: - disposition = 'form-data; name="%s"; filename="%s"' % (self.name, - to_string(self.filename)) + disposition = 'form-data; name="%s"; filename="%s"' % ( + self.name, to_string(self.filename)) else: disposition = 'form-data; name="%s"' % self.name @@ -267,8 +271,8 @@ class MultipartParam(object): self.cb(self, current, total) last_block = to_bytearray("") encoded_boundary = "--%s" % encode_and_quote(boundary) - boundary_exp = re.compile(to_bytes("^%s$" % re.escape(encoded_boundary)), - re.M) + boundary_exp = re.compile( + to_bytes("^%s$" % re.escape(encoded_boundary)), re.M) while True: block = self.fileobj.read(blocksize) if not 
block: @@ -296,6 +300,7 @@ class MultipartParam(object): return len(self.encode_hdr(boundary)) + 2 + valuesize + def encode_string(boundary, name, value): """Returns ``name`` and ``value`` encoded as a multipart/form-data variable. ``boundary`` is the boundary string used throughout @@ -303,8 +308,8 @@ def encode_string(boundary, name, value): return MultipartParam(name, value).encode(boundary) -def encode_file_header(boundary, paramname, filesize, filename=None, - filetype=None): + +def encode_file_header(boundary, paramname, filesize, filename=None, filetype=None): """Returns the leading data for a multipart/form-data field that contains file data. @@ -324,7 +329,8 @@ def encode_file_header(boundary, paramname, filesize, filename=None, """ return MultipartParam(paramname, filesize=filesize, filename=filename, - filetype=filetype).encode_hdr(boundary) + filetype=filetype).encode_hdr(boundary) + def get_body_size(params, boundary): """Returns the number of bytes that the multipart/form-data encoding @@ -332,6 +338,7 @@ def get_body_size(params, boundary): size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params)) return size + len(boundary) + 6 + def get_headers(params, boundary): """Returns a dictionary with Content-Type and Content-Length headers for the multipart/form-data encoding of ``params``.""" @@ -341,6 +348,7 @@ def get_headers(params, boundary): headers['Content-Length'] = str(get_body_size(params, boundary)) return headers + class multipart_yielder: def __init__(self, params, boundary, cb): self.params = params @@ -396,6 +404,7 @@ class multipart_yielder: for param in self.params: param.reset() + def multipart_encode(params, boundary=None, cb=None): """Encode ``params`` as multipart/form-data. 
diff --git a/lib/cloudinary/poster/streaminghttp.py b/lib/cloudinary/poster/streaminghttp.py index d8af5212..f5713cc0 100644 --- a/lib/cloudinary/poster/streaminghttp.py +++ b/lib/cloudinary/poster/streaminghttp.py @@ -27,15 +27,18 @@ Example usage: ... {'Content-Length': str(len(s))}) """ -import sys, socket -from cloudinary.compat import httplib, urllib2, NotConnected +import socket +import sys + +from cloudinary.compat import NotConnected, httplib, urllib2 __all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler', - 'StreamingHTTPHandler', 'register_openers'] + 'StreamingHTTPHandler', 'register_openers'] if hasattr(httplib, 'HTTPS'): __all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection']) + class _StreamingHTTPMixin: """Mixin class for HTTP and HTTPS connections that implements a streaming send method.""" @@ -62,7 +65,7 @@ class _StreamingHTTPMixin: print("send:", repr(value)) try: blocksize = 8192 - if hasattr(value, 'read') : + if hasattr(value, 'read'): if hasattr(value, 'seek'): value.seek(0) if self.debuglevel > 0: @@ -86,10 +89,12 @@ class _StreamingHTTPMixin: self.close() raise + class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection): """Subclass of `httplib.HTTPConnection` that overrides the `send()` method to support iterable body objects""" + class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler): """Subclass of `urllib2.HTTPRedirectHandler` that overrides the `redirect_request` method to properly handle redirected POST requests @@ -114,7 +119,7 @@ class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler): """ m = req.get_method() if (code in (301, 302, 303, 307) and m in ("GET", "HEAD") - or code in (301, 302, 303) and m == "POST"): + or code in (301, 302, 303) and m == "POST"): # Strictly (according to RFC 2616), 301 or 302 in response # to a POST MUST NOT cause a redirection without confirmation # from the user (of urllib2, in this case). 
In practice, @@ -125,14 +130,16 @@ class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler): newheaders = dict((k, v) for k, v in req.headers.items() if k.lower() not in ( "content-length", "content-type") - ) - return urllib2.Request(newurl, - headers=newheaders, - origin_req_host=req.get_origin_req_host(), - unverifiable=True) + ) + return urllib2.Request( + newurl, + headers=newheaders, + origin_req_host=req.get_origin_req_host(), + unverifiable=True) else: raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp) + class StreamingHTTPHandler(urllib2.HTTPHandler): """Subclass of `urllib2.HTTPHandler` that uses StreamingHTTPConnection as its http connection class.""" @@ -156,9 +163,9 @@ class StreamingHTTPHandler(urllib2.HTTPHandler): "No Content-Length specified for iterable body") return urllib2.HTTPHandler.do_request_(self, req) + if hasattr(httplib, 'HTTPS'): - class StreamingHTTPSConnection(_StreamingHTTPMixin, - httplib.HTTPSConnection): + class StreamingHTTPSConnection(_StreamingHTTPMixin, httplib.HTTPSConnection): """Subclass of `httplib.HTTSConnection` that overrides the `send()` method to support iterable body objects""" @@ -179,7 +186,7 @@ if hasattr(httplib, 'HTTPS'): if hasattr(data, 'read') or hasattr(data, 'next'): if not req.has_header('Content-length'): raise ValueError( - "No Content-Length specified for iterable body") + "No Content-Length specified for iterable body") return urllib2.HTTPSHandler.do_request_(self, req) @@ -188,7 +195,8 @@ def get_handlers(): if hasattr(httplib, "HTTPS"): handlers.append(StreamingHTTPSHandler) return handlers - + + def register_openers(): """Register the streaming http handlers in the global urllib2 default opener object. diff --git a/lib/cloudinary/search.py b/lib/cloudinary/search.py index 2decef84..e3a52b49 100644 --- a/lib/cloudinary/search.py +++ b/lib/cloudinary/search.py @@ -1,6 +1,7 @@ import json from copy import deepcopy -from . 
import api + +from cloudinary.api import call_json_api class Search: @@ -46,8 +47,8 @@ class Search: def execute(self, **options): """Execute the search and return results.""" options["content_type"] = 'application/json' - uri = ['resources','search'] - return api.call_json_api('post', uri, self.as_dict(), **options) + uri = ['resources', 'search'] + return call_json_api('post', uri, self.as_dict(), **options) def _add(self, name, value): if name not in self.query: @@ -56,4 +57,4 @@ class Search: return self def as_dict(self): - return deepcopy(self.query) \ No newline at end of file + return deepcopy(self.query) diff --git a/lib/cloudinary/static/js/jquery.cloudinary.js b/lib/cloudinary/static/js/jquery.cloudinary.js index 36c0034c..9def7cf7 100644 --- a/lib/cloudinary/static/js/jquery.cloudinary.js +++ b/lib/cloudinary/static/js/jquery.cloudinary.js @@ -802,7 +802,7 @@ var slice = [].slice, function TextLayer(options) { var keys; TextLayer.__super__.constructor.call(this, options); - keys = ["resourceType", "resourceType", "fontFamily", "fontSize", "fontWeight", "fontStyle", "textDecoration", "textAlign", "stroke", "letterSpacing", "lineSpacing", "text"]; + keys = ["resourceType", "resourceType", "fontFamily", "fontSize", "fontWeight", "fontStyle", "textDecoration", "textAlign", "stroke", "letterSpacing", "lineSpacing", "fontHinting", "fontAntialiasing", "text"]; if (options != null) { keys.forEach((function(_this) { return function(key) { @@ -871,6 +871,16 @@ var slice = [].slice, return this; }; + TextLayer.prototype.fontAntialiasing = function(fontAntialiasing){ + this.options.fontAntialiasing = fontAntialiasing; + return this; + }; + + TextLayer.prototype.fontHinting = function(fontHinting ){ + this.options.fontHinting = fontHinting ; + return this; + }; + TextLayer.prototype.text = function(text) { this.options.text = text; return this; @@ -932,6 +942,12 @@ var slice = [].slice, if (!(Util.isEmpty(this.options.lineSpacing) && 
!Util.isNumberLike(this.options.lineSpacing))) { components.push("line_spacing_" + this.options.lineSpacing); } + if (this.options.fontAntialiasing !== "none") { + components.push("antialias_"+this.options.fontAntialiasing); + } + if (this.options.fontHinting !== "none") { + components.push("hinting_"+this.options.fontHinting); + } if (!Util.isEmpty(Util.compact(components))) { if (Util.isEmpty(this.options.fontFamily)) { throw "Must supply fontFamily. " + components; @@ -2780,6 +2796,20 @@ var slice = [].slice, return this.param(value, "gravity", "g"); }; + Transformation.prototype.fps = function(value) { + return this.param(value, "fps", "fps", (function(_this) { + return function(fps) { + if (Util.isString(fps)) { + return fps; + } else if (Util.isArray(fps)) { + return fps.join("-"); + } else { + return fps; + } + }; + })(this)); + }; + Transformation.prototype.height = function(value) { return this.param(value, "height", "h", (function(_this) { return function() { diff --git a/lib/cloudinary/static/js/jquery.fileupload.js b/lib/cloudinary/static/js/jquery.fileupload.js index f20bc6d0..700f9013 100644 --- a/lib/cloudinary/static/js/jquery.fileupload.js +++ b/lib/cloudinary/static/js/jquery.fileupload.js @@ -43,7 +43,7 @@ '|(Kindle/(1\\.0|2\\.[05]|3\\.0))' ).test(window.navigator.userAgent) || // Feature detection for all other devices: - $('').prop('disabled')); + $('').prop('disabled')); // The FileReader API is not actually used, but works as feature detection, // as some Safari versions (5?) 
support XHR file uploads via the FormData API, @@ -261,6 +261,9 @@ // Callback for dragover events of the dropZone(s): // dragover: function (e) {}, // .bind('fileuploaddragover', func); + // Callback before the start of each chunk upload request (before form data initialization): + // chunkbeforesend: function (e, data) {}, // .bind('fileuploadchunkbeforesend', func); + // Callback for the start of each chunk upload request: // chunksend: function (e, data) {}, // .bind('fileuploadchunksend', func); @@ -434,6 +437,13 @@ } }, + _deinitProgressListener: function (options) { + var xhr = options.xhr ? options.xhr() : $.ajaxSettings.xhr(); + if (xhr.upload) { + $(xhr.upload).unbind('progress'); + } + }, + _isInstanceOf: function (type, obj) { // Cross-frame instanceof check return Object.prototype.toString.call(obj) === '[object ' + type + ']'; @@ -453,7 +463,7 @@ } if (!multipart || options.blob || !this._isInstanceOf('File', file)) { options.headers['Content-Disposition'] = 'attachment; filename="' + - encodeURI(file.name) + '"'; + encodeURI(file.uploadName || file.name) + '"'; } if (!multipart) { options.contentType = file.type || 'application/octet-stream'; @@ -489,7 +499,11 @@ }); } if (options.blob) { - formData.append(paramName, options.blob, file.name); + formData.append( + paramName, + options.blob, + file.uploadName || file.name + ); } else { $.each(options.files, function (index, file) { // This check allows the tests to run with @@ -762,6 +776,8 @@ // Expose the chunk bytes position range: o.contentRange = 'bytes ' + ub + '-' + (ub + o.chunkSize - 1) + '/' + fs; + // Trigger chunkbeforesend to allow form data to be updated for this chunk + that._trigger('chunkbeforesend', null, o); // Process the upload data (the blob and potential form data): that._initXHRData(o); // Add progress listeners for this chunk upload: @@ -808,6 +824,9 @@ o.context, [jqXHR, textStatus, errorThrown] ); + }) + .always(function () { + that._deinitProgressListener(o); }); }; 
this._enhancePromise(promise); @@ -909,6 +928,7 @@ }).fail(function (jqXHR, textStatus, errorThrown) { that._onFail(jqXHR, textStatus, errorThrown, options); }).always(function (jqXHRorResult, textStatus, jqXHRorError) { + that._deinitProgressListener(options); that._onAlways( jqXHRorResult, textStatus, @@ -1126,7 +1146,7 @@ dirReader = entry.createReader(); readEntries(); } else { - // Return an empy list for file system items + // Return an empty list for file system items // other than files or directories: dfd.resolve([]); } diff --git a/lib/cloudinary/static/js/jquery.ui.widget.js b/lib/cloudinary/static/js/jquery.ui.widget.js index e08df3fd..914b8ffb 100644 --- a/lib/cloudinary/static/js/jquery.ui.widget.js +++ b/lib/cloudinary/static/js/jquery.ui.widget.js @@ -1,571 +1,751 @@ -/*! jQuery UI - v1.11.4+CommonJS - 2015-08-28 -* http://jqueryui.com -* Includes: widget.js -* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */ +/*! jQuery UI - v1.12.1+CommonJS - 2018-02-10 + * http://jqueryui.com + * Includes: widget.js + * Copyright jQuery Foundation and other contributors; Licensed MIT */ (function( factory ) { - if ( typeof define === "function" && define.amd ) { + if ( typeof define === "function" && define.amd ) { - // AMD. Register as an anonymous module. - define([ "jquery" ], factory ); + // AMD. Register as an anonymous module. + define([ "jquery" ], factory ); + } else if ( typeof exports === "object" ) { - } else if ( typeof exports === "object" ) { + // Node/CommonJS + factory( require( "jquery" ) ); + } else { - // Node/CommonJS - factory( require( "jquery" ) ); - - } else { - - // Browser globals - factory( jQuery ); - } + // Browser globals + factory( jQuery ); + } }(function( $ ) { -/*! - * jQuery UI Widget 1.11.4 - * http://jqueryui.com - * - * Copyright jQuery Foundation and other contributors - * Released under the MIT license. 
- * http://jquery.org/license - * - * http://api.jqueryui.com/jQuery.widget/ - */ + + $.ui = $.ui || {}; + + var version = $.ui.version = "1.12.1"; -var widget_uuid = 0, - widget_slice = Array.prototype.slice; + /*! + * jQuery UI Widget 1.12.1 + * http://jqueryui.com + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license. + * http://jquery.org/license + */ -$.cleanData = (function( orig ) { - return function( elems ) { - var events, elem, i; - for ( i = 0; (elem = elems[i]) != null; i++ ) { - try { + //>>label: Widget + //>>group: Core + //>>description: Provides a factory for creating stateful widgets with a common API. + //>>docs: http://api.jqueryui.com/jQuery.widget/ + //>>demos: http://jqueryui.com/widget/ - // Only trigger remove when necessary to save time - events = $._data( elem, "events" ); - if ( events && events.remove ) { - $( elem ).triggerHandler( "remove" ); - } - // http://bugs.jquery.com/ticket/8235 - } catch ( e ) {} - } - orig( elems ); - }; -})( $.cleanData ); -$.widget = function( name, base, prototype ) { - var fullName, existingConstructor, constructor, basePrototype, - // proxiedPrototype allows the provided prototype to remain unmodified - // so that it can be used as a mixin for multiple widgets (#8876) - proxiedPrototype = {}, - namespace = name.split( "." )[ 0 ]; + var widgetUuid = 0; + var widgetSlice = Array.prototype.slice; - name = name.split( "." 
)[ 1 ]; - fullName = namespace + "-" + name; + $.cleanData = ( function( orig ) { + return function( elems ) { + var events, elem, i; + for ( i = 0; ( elem = elems[ i ] ) != null; i++ ) { + try { - if ( !prototype ) { - prototype = base; - base = $.Widget; - } + // Only trigger remove when necessary to save time + events = $._data( elem, "events" ); + if ( events && events.remove ) { + $( elem ).triggerHandler( "remove" ); + } - // create selector for plugin - $.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) { - return !!$.data( elem, fullName ); - }; + // Http://bugs.jquery.com/ticket/8235 + } catch ( e ) {} + } + orig( elems ); + }; + } )( $.cleanData ); - $[ namespace ] = $[ namespace ] || {}; - existingConstructor = $[ namespace ][ name ]; - constructor = $[ namespace ][ name ] = function( options, element ) { - // allow instantiation without "new" keyword - if ( !this._createWidget ) { - return new constructor( options, element ); - } + $.widget = function( name, base, prototype ) { + var existingConstructor, constructor, basePrototype; - // allow instantiation without initializing for simple inheritance - // must use "new" keyword (the code above always passes args) - if ( arguments.length ) { - this._createWidget( options, element ); - } - }; - // extend with the existing constructor to carry over any static properties - $.extend( constructor, existingConstructor, { - version: prototype.version, - // copy the object used to create the prototype in case we need to - // redefine the widget later - _proto: $.extend( {}, prototype ), - // track widgets that inherit from this widget in case this widget is - // redefined after a widget inherits from it - _childConstructors: [] - }); + // ProxiedPrototype allows the provided prototype to remain unmodified + // so that it can be used as a mixin for multiple widgets (#8876) + var proxiedPrototype = {}; - basePrototype = new base(); - // we need to make the options hash a property directly on the new 
instance - // otherwise we'll modify the options hash on the prototype that we're - // inheriting from - basePrototype.options = $.widget.extend( {}, basePrototype.options ); - $.each( prototype, function( prop, value ) { - if ( !$.isFunction( value ) ) { - proxiedPrototype[ prop ] = value; - return; - } - proxiedPrototype[ prop ] = (function() { - var _super = function() { - return base.prototype[ prop ].apply( this, arguments ); - }, - _superApply = function( args ) { - return base.prototype[ prop ].apply( this, args ); - }; - return function() { - var __super = this._super, - __superApply = this._superApply, - returnValue; + var namespace = name.split( "." )[ 0 ]; + name = name.split( "." )[ 1 ]; + var fullName = namespace + "-" + name; - this._super = _super; - this._superApply = _superApply; + if ( !prototype ) { + prototype = base; + base = $.Widget; + } - returnValue = value.apply( this, arguments ); + if ( $.isArray( prototype ) ) { + prototype = $.extend.apply( null, [ {} ].concat( prototype ) ); + } - this._super = __super; - this._superApply = __superApply; + // Create selector for plugin + $.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) { + return !!$.data( elem, fullName ); + }; - return returnValue; - }; - })(); - }); - constructor.prototype = $.widget.extend( basePrototype, { - // TODO: remove support for widgetEventPrefix - // always use the name + a colon as the prefix, e.g., draggable:start - // don't prefix for widgets that aren't DOM-based - widgetEventPrefix: existingConstructor ? 
(basePrototype.widgetEventPrefix || name) : name - }, proxiedPrototype, { - constructor: constructor, - namespace: namespace, - widgetName: name, - widgetFullName: fullName - }); + $[ namespace ] = $[ namespace ] || {}; + existingConstructor = $[ namespace ][ name ]; + constructor = $[ namespace ][ name ] = function( options, element ) { - // If this widget is being redefined then we need to find all widgets that - // are inheriting from it and redefine all of them so that they inherit from - // the new version of this widget. We're essentially trying to replace one - // level in the prototype chain. - if ( existingConstructor ) { - $.each( existingConstructor._childConstructors, function( i, child ) { - var childPrototype = child.prototype; + // Allow instantiation without "new" keyword + if ( !this._createWidget ) { + return new constructor( options, element ); + } - // redefine the child widget using the same prototype that was - // originally used, but inherit from the new version of the base - $.widget( childPrototype.namespace + "." 
+ childPrototype.widgetName, constructor, child._proto ); - }); - // remove the list of existing child constructors from the old constructor - // so the old child constructors can be garbage collected - delete existingConstructor._childConstructors; - } else { - base._childConstructors.push( constructor ); - } + // Allow instantiation without initializing for simple inheritance + // must use "new" keyword (the code above always passes args) + if ( arguments.length ) { + this._createWidget( options, element ); + } + }; - $.widget.bridge( name, constructor ); + // Extend with the existing constructor to carry over any static properties + $.extend( constructor, existingConstructor, { + version: prototype.version, - return constructor; -}; + // Copy the object used to create the prototype in case we need to + // redefine the widget later + _proto: $.extend( {}, prototype ), -$.widget.extend = function( target ) { - var input = widget_slice.call( arguments, 1 ), - inputIndex = 0, - inputLength = input.length, - key, - value; - for ( ; inputIndex < inputLength; inputIndex++ ) { - for ( key in input[ inputIndex ] ) { - value = input[ inputIndex ][ key ]; - if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) { - // Clone objects - if ( $.isPlainObject( value ) ) { - target[ key ] = $.isPlainObject( target[ key ] ) ? - $.widget.extend( {}, target[ key ], value ) : - // Don't extend strings, arrays, etc. 
with objects - $.widget.extend( {}, value ); - // Copy everything else by reference - } else { - target[ key ] = value; - } - } - } - } - return target; -}; + // Track widgets that inherit from this widget in case this widget is + // redefined after a widget inherits from it + _childConstructors: [] + } ); -$.widget.bridge = function( name, object ) { - var fullName = object.prototype.widgetFullName || name; - $.fn[ name ] = function( options ) { - var isMethodCall = typeof options === "string", - args = widget_slice.call( arguments, 1 ), - returnValue = this; + basePrototype = new base(); - if ( isMethodCall ) { - this.each(function() { - var methodValue, - instance = $.data( this, fullName ); - if ( options === "instance" ) { - returnValue = instance; - return false; - } - if ( !instance ) { - return $.error( "cannot call methods on " + name + " prior to initialization; " + - "attempted to call method '" + options + "'" ); - } - if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) { - return $.error( "no such method '" + options + "' for " + name + " widget instance" ); - } - methodValue = instance[ options ].apply( instance, args ); - if ( methodValue !== instance && methodValue !== undefined ) { - returnValue = methodValue && methodValue.jquery ? 
- returnValue.pushStack( methodValue.get() ) : - methodValue; - return false; - } - }); - } else { + // We need to make the options hash a property directly on the new instance + // otherwise we'll modify the options hash on the prototype that we're + // inheriting from + basePrototype.options = $.widget.extend( {}, basePrototype.options ); + $.each( prototype, function( prop, value ) { + if ( !$.isFunction( value ) ) { + proxiedPrototype[ prop ] = value; + return; + } + proxiedPrototype[ prop ] = ( function() { + function _super() { + return base.prototype[ prop ].apply( this, arguments ); + } - // Allow multiple hashes to be passed on init - if ( args.length ) { - options = $.widget.extend.apply( null, [ options ].concat(args) ); - } + function _superApply( args ) { + return base.prototype[ prop ].apply( this, args ); + } - this.each(function() { - var instance = $.data( this, fullName ); - if ( instance ) { - instance.option( options || {} ); - if ( instance._init ) { - instance._init(); - } - } else { - $.data( this, fullName, new object( options, this ) ); - } - }); - } + return function() { + var __super = this._super; + var __superApply = this._superApply; + var returnValue; - return returnValue; - }; -}; + this._super = _super; + this._superApply = _superApply; -$.Widget = function( /* options, element */ ) {}; -$.Widget._childConstructors = []; + returnValue = value.apply( this, arguments ); -$.Widget.prototype = { - widgetName: "widget", - widgetEventPrefix: "", - defaultElement: "
", - options: { - disabled: false, + this._super = __super; + this._superApply = __superApply; - // callbacks - create: null - }, - _createWidget: function( options, element ) { - element = $( element || this.defaultElement || this )[ 0 ]; - this.element = $( element ); - this.uuid = widget_uuid++; - this.eventNamespace = "." + this.widgetName + this.uuid; + return returnValue; + }; + } )(); + } ); + constructor.prototype = $.widget.extend( basePrototype, { - this.bindings = $(); - this.hoverable = $(); - this.focusable = $(); + // TODO: remove support for widgetEventPrefix + // always use the name + a colon as the prefix, e.g., draggable:start + // don't prefix for widgets that aren't DOM-based + widgetEventPrefix: existingConstructor ? ( basePrototype.widgetEventPrefix || name ) : name + }, proxiedPrototype, { + constructor: constructor, + namespace: namespace, + widgetName: name, + widgetFullName: fullName + } ); - if ( element !== this ) { - $.data( element, this.widgetFullName, this ); - this._on( true, this.element, { - remove: function( event ) { - if ( event.target === element ) { - this.destroy(); - } - } - }); - this.document = $( element.style ? - // element within the document - element.ownerDocument : - // element is window or document - element.document || element ); - this.window = $( this.document[0].defaultView || this.document[0].parentWindow ); - } + // If this widget is being redefined then we need to find all widgets that + // are inheriting from it and redefine all of them so that they inherit from + // the new version of this widget. We're essentially trying to replace one + // level in the prototype chain. 
+ if ( existingConstructor ) { + $.each( existingConstructor._childConstructors, function( i, child ) { + var childPrototype = child.prototype; - this.options = $.widget.extend( {}, - this.options, - this._getCreateOptions(), - options ); + // Redefine the child widget using the same prototype that was + // originally used, but inherit from the new version of the base + $.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, + child._proto ); + } ); - this._create(); - this._trigger( "create", null, this._getCreateEventData() ); - this._init(); - }, - _getCreateOptions: $.noop, - _getCreateEventData: $.noop, - _create: $.noop, - _init: $.noop, + // Remove the list of existing child constructors from the old constructor + // so the old child constructors can be garbage collected + delete existingConstructor._childConstructors; + } else { + base._childConstructors.push( constructor ); + } - destroy: function() { - this._destroy(); - // we can probably remove the unbind calls in 2.0 - // all event bindings should go through this._on() - this.element - .unbind( this.eventNamespace ) - .removeData( this.widgetFullName ) - // support: jquery <1.6.3 - // http://bugs.jquery.com/ticket/9413 - .removeData( $.camelCase( this.widgetFullName ) ); - this.widget() - .unbind( this.eventNamespace ) - .removeAttr( "aria-disabled" ) - .removeClass( - this.widgetFullName + "-disabled " + - "ui-state-disabled" ); + $.widget.bridge( name, constructor ); - // clean up events and states - this.bindings.unbind( this.eventNamespace ); - this.hoverable.removeClass( "ui-state-hover" ); - this.focusable.removeClass( "ui-state-focus" ); - }, - _destroy: $.noop, + return constructor; + }; - widget: function() { - return this.element; - }, + $.widget.extend = function( target ) { + var input = widgetSlice.call( arguments, 1 ); + var inputIndex = 0; + var inputLength = input.length; + var key; + var value; - option: function( key, value ) { - var options = key, - parts, 
- curOption, - i; + for ( ; inputIndex < inputLength; inputIndex++ ) { + for ( key in input[ inputIndex ] ) { + value = input[ inputIndex ][ key ]; + if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) { - if ( arguments.length === 0 ) { - // don't return a reference to the internal hash - return $.widget.extend( {}, this.options ); - } + // Clone objects + if ( $.isPlainObject( value ) ) { + target[ key ] = $.isPlainObject( target[ key ] ) ? + $.widget.extend( {}, target[ key ], value ) : - if ( typeof key === "string" ) { - // handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } } - options = {}; - parts = key.split( "." ); - key = parts.shift(); - if ( parts.length ) { - curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] ); - for ( i = 0; i < parts.length - 1; i++ ) { - curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {}; - curOption = curOption[ parts[ i ] ]; - } - key = parts.pop(); - if ( arguments.length === 1 ) { - return curOption[ key ] === undefined ? null : curOption[ key ]; - } - curOption[ key ] = value; - } else { - if ( arguments.length === 1 ) { - return this.options[ key ] === undefined ? null : this.options[ key ]; - } - options[ key ] = value; - } - } + // Don't extend strings, arrays, etc. 
with objects + $.widget.extend( {}, value ); - this._setOptions( options ); + // Copy everything else by reference + } else { + target[ key ] = value; + } + } + } + } + return target; + }; - return this; - }, - _setOptions: function( options ) { - var key; + $.widget.bridge = function( name, object ) { + var fullName = object.prototype.widgetFullName || name; + $.fn[ name ] = function( options ) { + var isMethodCall = typeof options === "string"; + var args = widgetSlice.call( arguments, 1 ); + var returnValue = this; - for ( key in options ) { - this._setOption( key, options[ key ] ); - } + if ( isMethodCall ) { - return this; - }, - _setOption: function( key, value ) { - this.options[ key ] = value; + // If this is an empty collection, we need to have the instance method + // return undefined instead of the jQuery instance + if ( !this.length && options === "instance" ) { + returnValue = undefined; + } else { + this.each( function() { + var methodValue; + var instance = $.data( this, fullName ); - if ( key === "disabled" ) { - this.widget() - .toggleClass( this.widgetFullName + "-disabled", !!value ); + if ( options === "instance" ) { + returnValue = instance; + return false; + } - // If the widget is becoming disabled, then nothing is interactive - if ( value ) { - this.hoverable.removeClass( "ui-state-hover" ); - this.focusable.removeClass( "ui-state-focus" ); - } - } + if ( !instance ) { + return $.error( "cannot call methods on " + name + + " prior to initialization; " + + "attempted to call method '" + options + "'" ); + } - return this; - }, + if ( !$.isFunction( instance[ options ] ) || options.charAt( 0 ) === "_" ) { + return $.error( "no such method '" + options + "' for " + name + + " widget instance" ); + } - enable: function() { - return this._setOptions({ disabled: false }); - }, - disable: function() { - return this._setOptions({ disabled: true }); - }, + methodValue = instance[ options ].apply( instance, args ); - _on: function( 
suppressDisabledCheck, element, handlers ) { - var delegateElement, - instance = this; + if ( methodValue !== instance && methodValue !== undefined ) { + returnValue = methodValue && methodValue.jquery ? + returnValue.pushStack( methodValue.get() ) : + methodValue; + return false; + } + } ); + } + } else { - // no suppressDisabledCheck flag, shuffle arguments - if ( typeof suppressDisabledCheck !== "boolean" ) { - handlers = element; - element = suppressDisabledCheck; - suppressDisabledCheck = false; - } + // Allow multiple hashes to be passed on init + if ( args.length ) { + options = $.widget.extend.apply( null, [ options ].concat( args ) ); + } - // no element argument, shuffle and use this.element - if ( !handlers ) { - handlers = element; - element = this.element; - delegateElement = this.widget(); - } else { - element = delegateElement = $( element ); - this.bindings = this.bindings.add( element ); - } + this.each( function() { + var instance = $.data( this, fullName ); + if ( instance ) { + instance.option( options || {} ); + if ( instance._init ) { + instance._init(); + } + } else { + $.data( this, fullName, new object( options, this ) ); + } + } ); + } - $.each( handlers, function( event, handler ) { - function handlerProxy() { - // allow widgets to customize the disabled handling - // - disabled as an array instead of boolean - // - disabled class as method for disabling individual parts - if ( !suppressDisabledCheck && - ( instance.options.disabled === true || - $( this ).hasClass( "ui-state-disabled" ) ) ) { - return; - } - return ( typeof handler === "string" ? 
instance[ handler ] : handler ) - .apply( instance, arguments ); - } + return returnValue; + }; + }; - // copy the guid so direct unbinding works - if ( typeof handler !== "string" ) { - handlerProxy.guid = handler.guid = - handler.guid || handlerProxy.guid || $.guid++; - } + $.Widget = function( /* options, element */ ) {}; + $.Widget._childConstructors = []; - var match = event.match( /^([\w:-]*)\s*(.*)$/ ), - eventName = match[1] + instance.eventNamespace, - selector = match[2]; - if ( selector ) { - delegateElement.delegate( selector, eventName, handlerProxy ); - } else { - element.bind( eventName, handlerProxy ); - } - }); - }, + $.Widget.prototype = { + widgetName: "widget", + widgetEventPrefix: "", + defaultElement: "
", - _off: function( element, eventName ) { - eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) + - this.eventNamespace; - element.unbind( eventName ).undelegate( eventName ); + options: { + classes: {}, + disabled: false, - // Clear the stack to avoid memory leaks (#10056) - this.bindings = $( this.bindings.not( element ).get() ); - this.focusable = $( this.focusable.not( element ).get() ); - this.hoverable = $( this.hoverable.not( element ).get() ); - }, + // Callbacks + create: null + }, - _delay: function( handler, delay ) { - function handlerProxy() { - return ( typeof handler === "string" ? instance[ handler ] : handler ) - .apply( instance, arguments ); - } - var instance = this; - return setTimeout( handlerProxy, delay || 0 ); - }, + _createWidget: function( options, element ) { + element = $( element || this.defaultElement || this )[ 0 ]; + this.element = $( element ); + this.uuid = widgetUuid++; + this.eventNamespace = "." + this.widgetName + this.uuid; - _hoverable: function( element ) { - this.hoverable = this.hoverable.add( element ); - this._on( element, { - mouseenter: function( event ) { - $( event.currentTarget ).addClass( "ui-state-hover" ); - }, - mouseleave: function( event ) { - $( event.currentTarget ).removeClass( "ui-state-hover" ); - } - }); - }, + this.bindings = $(); + this.hoverable = $(); + this.focusable = $(); + this.classesElementLookup = {}; - _focusable: function( element ) { - this.focusable = this.focusable.add( element ); - this._on( element, { - focusin: function( event ) { - $( event.currentTarget ).addClass( "ui-state-focus" ); - }, - focusout: function( event ) { - $( event.currentTarget ).removeClass( "ui-state-focus" ); - } - }); - }, + if ( element !== this ) { + $.data( element, this.widgetFullName, this ); + this._on( true, this.element, { + remove: function( event ) { + if ( event.target === element ) { + this.destroy(); + } + } + } ); + this.document = $( element.style ? 
- _trigger: function( type, event, data ) { - var prop, orig, - callback = this.options[ type ]; + // Element within the document + element.ownerDocument : - data = data || {}; - event = $.Event( event ); - event.type = ( type === this.widgetEventPrefix ? - type : - this.widgetEventPrefix + type ).toLowerCase(); - // the original event may come from any element - // so we need to reset the target on the new event - event.target = this.element[ 0 ]; + // Element is window or document + element.document || element ); + this.window = $( this.document[ 0 ].defaultView || this.document[ 0 ].parentWindow ); + } - // copy original event properties over to the new event - orig = event.originalEvent; - if ( orig ) { - for ( prop in orig ) { - if ( !( prop in event ) ) { - event[ prop ] = orig[ prop ]; - } - } - } + this.options = $.widget.extend( {}, + this.options, + this._getCreateOptions(), + options ); - this.element.trigger( event, data ); - return !( $.isFunction( callback ) && - callback.apply( this.element[0], [ event ].concat( data ) ) === false || - event.isDefaultPrevented() ); - } -}; + this._create(); -$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) { - $.Widget.prototype[ "_" + method ] = function( element, options, callback ) { - if ( typeof options === "string" ) { - options = { effect: options }; - } - var hasOptions, - effectName = !options ? - method : - options === true || typeof options === "number" ? 
- defaultEffect : - options.effect || defaultEffect; - options = options || {}; - if ( typeof options === "number" ) { - options = { duration: options }; - } - hasOptions = !$.isEmptyObject( options ); - options.complete = callback; - if ( options.delay ) { - element.delay( options.delay ); - } - if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) { - element[ method ]( options ); - } else if ( effectName !== method && element[ effectName ] ) { - element[ effectName ]( options.duration, options.easing, callback ); - } else { - element.queue(function( next ) { - $( this )[ method ](); - if ( callback ) { - callback.call( element[ 0 ] ); - } - next(); - }); - } - }; -}); + if ( this.options.disabled ) { + this._setOptionDisabled( this.options.disabled ); + } + + this._trigger( "create", null, this._getCreateEventData() ); + this._init(); + }, + + _getCreateOptions: function() { + return {}; + }, + + _getCreateEventData: $.noop, + + _create: $.noop, + + _init: $.noop, + + destroy: function() { + var that = this; + + this._destroy(); + $.each( this.classesElementLookup, function( key, value ) { + that._removeClass( value, key ); + } ); + + // We can probably remove the unbind calls in 2.0 + // all event bindings should go through this._on() + this.element + .off( this.eventNamespace ) + .removeData( this.widgetFullName ); + this.widget() + .off( this.eventNamespace ) + .removeAttr( "aria-disabled" ); + + // Clean up events and states + this.bindings.off( this.eventNamespace ); + }, + + _destroy: $.noop, + + widget: function() { + return this.element; + }, + + option: function( key, value ) { + var options = key; + var parts; + var curOption; + var i; + + if ( arguments.length === 0 ) { + + // Don't return a reference to the internal hash + return $.widget.extend( {}, this.options ); + } + + if ( typeof key === "string" ) { + + // Handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } } + options = {}; + parts = key.split( "." 
); + key = parts.shift(); + if ( parts.length ) { + curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] ); + for ( i = 0; i < parts.length - 1; i++ ) { + curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {}; + curOption = curOption[ parts[ i ] ]; + } + key = parts.pop(); + if ( arguments.length === 1 ) { + return curOption[ key ] === undefined ? null : curOption[ key ]; + } + curOption[ key ] = value; + } else { + if ( arguments.length === 1 ) { + return this.options[ key ] === undefined ? null : this.options[ key ]; + } + options[ key ] = value; + } + } + + this._setOptions( options ); + + return this; + }, + + _setOptions: function( options ) { + var key; + + for ( key in options ) { + this._setOption( key, options[ key ] ); + } + + return this; + }, + + _setOption: function( key, value ) { + if ( key === "classes" ) { + this._setOptionClasses( value ); + } + + this.options[ key ] = value; + + if ( key === "disabled" ) { + this._setOptionDisabled( value ); + } + + return this; + }, + + _setOptionClasses: function( value ) { + var classKey, elements, currentElements; + + for ( classKey in value ) { + currentElements = this.classesElementLookup[ classKey ]; + if ( value[ classKey ] === this.options.classes[ classKey ] || + !currentElements || + !currentElements.length ) { + continue; + } + + // We are doing this to create a new jQuery object because the _removeClass() call + // on the next line is going to destroy the reference to the current elements being + // tracked. We need to save a copy of this collection so that we can add the new classes + // below. + elements = $( currentElements.get() ); + this._removeClass( currentElements, classKey ); + + // We don't use _addClass() here, because that uses this.options.classes + // for generating the string of classes. We want to use the value passed in from + // _setOption(), this is the new value of the classes option which was passed to + // _setOption(). 
We pass this value directly to _classes(). + elements.addClass( this._classes( { + element: elements, + keys: classKey, + classes: value, + add: true + } ) ); + } + }, + + _setOptionDisabled: function( value ) { + this._toggleClass( this.widget(), this.widgetFullName + "-disabled", null, !!value ); + + // If the widget is becoming disabled, then nothing is interactive + if ( value ) { + this._removeClass( this.hoverable, null, "ui-state-hover" ); + this._removeClass( this.focusable, null, "ui-state-focus" ); + } + }, + + enable: function() { + return this._setOptions( { disabled: false } ); + }, + + disable: function() { + return this._setOptions( { disabled: true } ); + }, + + _classes: function( options ) { + var full = []; + var that = this; + + options = $.extend( { + element: this.element, + classes: this.options.classes || {} + }, options ); + + function processClassString( classes, checkOption ) { + var current, i; + for ( i = 0; i < classes.length; i++ ) { + current = that.classesElementLookup[ classes[ i ] ] || $(); + if ( options.add ) { + current = $( $.unique( current.get().concat( options.element.get() ) ) ); + } else { + current = $( current.not( options.element ).get() ); + } + that.classesElementLookup[ classes[ i ] ] = current; + full.push( classes[ i ] ); + if ( checkOption && options.classes[ classes[ i ] ] ) { + full.push( options.classes[ classes[ i ] ] ); + } + } + } + + this._on( options.element, { + "remove": "_untrackClassesElement" + } ); + + if ( options.keys ) { + processClassString( options.keys.match( /\S+/g ) || [], true ); + } + if ( options.extra ) { + processClassString( options.extra.match( /\S+/g ) || [] ); + } + + return full.join( " " ); + }, + + _untrackClassesElement: function( event ) { + var that = this; + $.each( that.classesElementLookup, function( key, value ) { + if ( $.inArray( event.target, value ) !== -1 ) { + that.classesElementLookup[ key ] = $( value.not( event.target ).get() ); + } + } ); + }, + + _removeClass: 
function( element, keys, extra ) { + return this._toggleClass( element, keys, extra, false ); + }, + + _addClass: function( element, keys, extra ) { + return this._toggleClass( element, keys, extra, true ); + }, + + _toggleClass: function( element, keys, extra, add ) { + add = ( typeof add === "boolean" ) ? add : extra; + var shift = ( typeof element === "string" || element === null ), + options = { + extra: shift ? keys : extra, + keys: shift ? element : keys, + element: shift ? this.element : element, + add: add + }; + options.element.toggleClass( this._classes( options ), add ); + return this; + }, + + _on: function( suppressDisabledCheck, element, handlers ) { + var delegateElement; + var instance = this; + + // No suppressDisabledCheck flag, shuffle arguments + if ( typeof suppressDisabledCheck !== "boolean" ) { + handlers = element; + element = suppressDisabledCheck; + suppressDisabledCheck = false; + } + + // No element argument, shuffle and use this.element + if ( !handlers ) { + handlers = element; + element = this.element; + delegateElement = this.widget(); + } else { + element = delegateElement = $( element ); + this.bindings = this.bindings.add( element ); + } + + $.each( handlers, function( event, handler ) { + function handlerProxy() { + + // Allow widgets to customize the disabled handling + // - disabled as an array instead of boolean + // - disabled class as method for disabling individual parts + if ( !suppressDisabledCheck && + ( instance.options.disabled === true || + $( this ).hasClass( "ui-state-disabled" ) ) ) { + return; + } + return ( typeof handler === "string" ? 
instance[ handler ] : handler ) + .apply( instance, arguments ); + } + + // Copy the guid so direct unbinding works + if ( typeof handler !== "string" ) { + handlerProxy.guid = handler.guid = + handler.guid || handlerProxy.guid || $.guid++; + } + + var match = event.match( /^([\w:-]*)\s*(.*)$/ ); + var eventName = match[ 1 ] + instance.eventNamespace; + var selector = match[ 2 ]; + + if ( selector ) { + delegateElement.on( eventName, selector, handlerProxy ); + } else { + element.on( eventName, handlerProxy ); + } + } ); + }, + + _off: function( element, eventName ) { + eventName = ( eventName || "" ).split( " " ).join( this.eventNamespace + " " ) + + this.eventNamespace; + element.off( eventName ).off( eventName ); + + // Clear the stack to avoid memory leaks (#10056) + this.bindings = $( this.bindings.not( element ).get() ); + this.focusable = $( this.focusable.not( element ).get() ); + this.hoverable = $( this.hoverable.not( element ).get() ); + }, + + _delay: function( handler, delay ) { + function handlerProxy() { + return ( typeof handler === "string" ? 
instance[ handler ] : handler ) + .apply( instance, arguments ); + } + var instance = this; + return setTimeout( handlerProxy, delay || 0 ); + }, + + _hoverable: function( element ) { + this.hoverable = this.hoverable.add( element ); + this._on( element, { + mouseenter: function( event ) { + this._addClass( $( event.currentTarget ), null, "ui-state-hover" ); + }, + mouseleave: function( event ) { + this._removeClass( $( event.currentTarget ), null, "ui-state-hover" ); + } + } ); + }, + + _focusable: function( element ) { + this.focusable = this.focusable.add( element ); + this._on( element, { + focusin: function( event ) { + this._addClass( $( event.currentTarget ), null, "ui-state-focus" ); + }, + focusout: function( event ) { + this._removeClass( $( event.currentTarget ), null, "ui-state-focus" ); + } + } ); + }, + + _trigger: function( type, event, data ) { + var prop, orig; + var callback = this.options[ type ]; + + data = data || {}; + event = $.Event( event ); + event.type = ( type === this.widgetEventPrefix ? + type : + this.widgetEventPrefix + type ).toLowerCase(); + + // The original event may come from any element + // so we need to reset the target on the new event + event.target = this.element[ 0 ]; + + // Copy original event properties over to the new event + orig = event.originalEvent; + if ( orig ) { + for ( prop in orig ) { + if ( !( prop in event ) ) { + event[ prop ] = orig[ prop ]; + } + } + } + + this.element.trigger( event, data ); + return !( $.isFunction( callback ) && + callback.apply( this.element[ 0 ], [ event ].concat( data ) ) === false || + event.isDefaultPrevented() ); + } + }; + + $.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) { + $.Widget.prototype[ "_" + method ] = function( element, options, callback ) { + if ( typeof options === "string" ) { + options = { effect: options }; + } + + var hasOptions; + var effectName = !options ? + method : + options === true || typeof options === "number" ? 
+ defaultEffect : + options.effect || defaultEffect; + + options = options || {}; + if ( typeof options === "number" ) { + options = { duration: options }; + } + + hasOptions = !$.isEmptyObject( options ); + options.complete = callback; + + if ( options.delay ) { + element.delay( options.delay ); + } + + if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) { + element[ method ]( options ); + } else if ( effectName !== method && element[ effectName ] ) { + element[ effectName ]( options.duration, options.easing, callback ); + } else { + element.queue( function( next ) { + $( this )[ method ](); + if ( callback ) { + callback.call( element[ 0 ] ); + } + next(); + } ); + } + }; + } ); + + var widget = $.widget; -var widget = $.widget; diff --git a/lib/cloudinary/static/js/load-image.all.min.js b/lib/cloudinary/static/js/load-image.all.min.js index acd7113e..9111d131 100644 --- a/lib/cloudinary/static/js/load-image.all.min.js +++ b/lib/cloudinary/static/js/load-image.all.min.js @@ -1,2 +1,2 @@ -!function(e){"use strict";function t(e,i,a){var o,n=document.createElement("img");return n.onerror=function(o){return t.onerror(n,o,e,i,a)},n.onload=function(o){return t.onload(n,o,e,i,a)},"string"==typeof e?(t.fetchBlob(e,function(i){i?(e=i,o=t.createObjectURL(e)):(o=e,a&&a.crossOrigin&&(n.crossOrigin=a.crossOrigin)),n.src=o},a),n):t.isInstanceOf("Blob",e)||t.isInstanceOf("File",e)?(o=n._objectURL=t.createObjectURL(e))?(n.src=o,n):t.readFile(e,function(e){var t=e.target;t&&t.result?n.src=t.result:i&&i(e)}):void 0}function i(e,i){!e._objectURL||i&&i.noRevoke||(t.revokeObjectURL(e._objectURL),delete e._objectURL)}var a=e.createObjectURL&&e||e.URL&&URL.revokeObjectURL&&URL||e.webkitURL&&webkitURL;t.fetchBlob=function(e,t,i){t()},t.isInstanceOf=function(e,t){return Object.prototype.toString.call(t)==="[object 
"+e+"]"},t.transform=function(e,t,i,a,o){i(e,o)},t.onerror=function(e,t,a,o,n){i(e,n),o&&o.call(e,t)},t.onload=function(e,a,o,n,r){i(e,r),n&&t.transform(e,r,n,o,{})},t.createObjectURL=function(e){return!!a&&a.createObjectURL(e)},t.revokeObjectURL=function(e){return!!a&&a.revokeObjectURL(e)},t.readFile=function(t,i,a){if(e.FileReader){var o=new FileReader;if(o.onload=o.onerror=i,a=a||"readAsDataURL",o[a])return o[a](t),o}return!1},"function"==typeof define&&define.amd?define(function(){return t}):"object"==typeof module&&module.exports?module.exports=t:e.loadImage=t}("undefined"!=typeof window&&window||this),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image"],e):e("object"==typeof module&&module.exports?require("./load-image"):window.loadImage)}(function(e){"use strict";var t=e.transform;e.transform=function(i,a,o,n,r){t.call(e,e.scale(i,a,r),a,o,n,r)},e.transformCoordinates=function(){},e.getTransformedOptions=function(e,t){var i,a,o,n,r=t.aspectRatio;if(!r)return t;i={};for(a in t)t.hasOwnProperty(a)&&(i[a]=t[a]);return i.crop=!0,o=e.naturalWidth||e.width,n=e.naturalHeight||e.height,o/n>r?(i.maxWidth=n*r,i.maxHeight=n):(i.maxWidth=o,i.maxHeight=o/r),i},e.renderImageToCanvas=function(e,t,i,a,o,n,r,s,l,d){return e.getContext("2d").drawImage(t,i,a,o,n,r,s,l,d),e},e.hasCanvasOption=function(e){return e.canvas||e.crop||!!e.aspectRatio},e.scale=function(t,i,a){function o(){var e=Math.max((l||v)/v,(d||P)/P);e>1&&(v*=e,P*=e)}function n(){var e=Math.min((r||v)/v,(s||P)/P);e<1&&(v*=e,P*=e)}i=i||{};var r,s,l,d,c,u,f,g,h,m,p,S=document.createElement("canvas"),b=t.getContext||e.hasCanvasOption(i)&&S.getContext,y=t.naturalWidth||t.width,x=t.naturalHeight||t.height,v=y,P=x;if(b&&(f=(i=e.getTransformedOptions(t,i,a)).left||0,g=i.top||0,i.sourceWidth?(c=i.sourceWidth,void 0!==i.right&&void 0===i.left&&(f=y-c-i.right)):c=y-f-(i.right||0),i.sourceHeight?(u=i.sourceHeight,void 0!==i.bottom&&void 
0===i.top&&(g=x-u-i.bottom)):u=x-g-(i.bottom||0),v=c,P=u),r=i.maxWidth,s=i.maxHeight,l=i.minWidth,d=i.minHeight,b&&r&&s&&i.crop?(v=r,P=s,(p=c/u-r/s)<0?(u=s*c/r,void 0===i.top&&void 0===i.bottom&&(g=(x-u)/2)):p>0&&(c=r*u/s,void 0===i.left&&void 0===i.right&&(f=(y-c)/2))):((i.contain||i.cover)&&(l=r=r||l,d=s=s||d),i.cover?(n(),o()):(o(),n())),b){if((h=i.pixelRatio)>1&&(S.style.width=v+"px",S.style.height=P+"px",v*=h,P*=h,S.getContext("2d").scale(h,h)),(m=i.downsamplingRatio)>0&&m<1&&vv;)S.width=c*m,S.height=u*m,e.renderImageToCanvas(S,t,f,g,c,u,0,0,S.width,S.height),f=0,g=0,c=S.width,u=S.height,(t=document.createElement("canvas")).width=c,t.height=u,e.renderImageToCanvas(t,S,0,0,c,u,0,0,c,u);return S.width=v,S.height=P,e.transformCoordinates(S,i),e.renderImageToCanvas(S,t,f,g,c,u,0,0,v,P)}return t.width=v,t.height=P,t}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image"],e):e("object"==typeof module&&module.exports?require("./load-image"):window.loadImage)}(function(e){"use strict";var t="undefined"!=typeof Blob&&(Blob.prototype.slice||Blob.prototype.webkitSlice||Blob.prototype.mozSlice);e.blobSlice=t&&function(){return(this.slice||this.webkitSlice||this.mozSlice).apply(this,arguments)},e.metaDataParsers={jpeg:{65505:[]}},e.parseMetaData=function(t,i,a,o){a=a||{},o=o||{};var n=this,r=a.maxMetaDataSize||262144;!!("undefined"!=typeof DataView&&t&&t.size>=12&&"image/jpeg"===t.type&&e.blobSlice)&&e.readFile(e.blobSlice.call(t,0,r),function(t){if(t.target.error)return console.log(t.target.error),void i(o);var r,s,l,d,c=t.target.result,u=new DataView(c),f=2,g=u.byteLength-4,h=f;if(65496===u.getUint16(0)){for(;f=65504&&r<=65519||65534===r);){if(s=u.getUint16(f+2)+2,f+s>u.byteLength){console.log("Invalid meta data: Invalid segment size.");break}if(l=e.metaDataParsers.jpeg[r])for(d=0;d6&&(c.slice?o.imageHead=c.slice(0,h):o.imageHead=new Uint8Array(c).subarray(0,h))}else console.log("Invalid JPEG file: Missing JPEG 
marker.");i(o)},"readAsArrayBuffer")||i(o)},e.hasMetaOption=function(e){return e&&e.meta};var i=e.transform;e.transform=function(t,a,o,n,r){e.hasMetaOption(a)?e.parseMetaData(n,function(r){i.call(e,t,a,o,n,r)},a,r):i.apply(e,arguments)}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";"undefined"!=typeof fetch&&"undefined"!=typeof Request&&(e.fetchBlob=function(t,i,a){if(e.hasMetaOption(a))return fetch(new Request(t,a)).then(function(e){return e.blob()}).then(i).catch(function(e){console.log(e),i()});i()})}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";e.ExifMap=function(){return this},e.ExifMap.prototype.map={Orientation:274},e.ExifMap.prototype.get=function(e){return this[e]||this[this.map[e]]},e.getExifThumbnail=function(e,t,i){var a,o,n;{if(i&&!(t+i>e.byteLength)){for(a=[],o=0;o4?i+t.getUint32(a+8,r):a+8)+s>t.byteLength)){if(1===n)return g.getValue(t,l,r);for(d=[],c=0;ce.byteLength)console.log("Invalid Exif data: Invalid directory offset.");else{if(n=e.getUint16(i,a),!((r=i+2+12*n)+4>e.byteLength)){for(s=0;st.byteLength)console.log("Invalid Exif data: Invalid segment size.");else if(0===t.getUint16(i+8)){switch(t.getUint16(d)){case 18761:r=!0;break;case 19789:r=!1;break;default:return void console.log("Invalid Exif data: Invalid byte alignment marker.")}42===t.getUint16(d+2,r)?(s=t.getUint32(d+4,r),o.exif=new 
e.ExifMap,(s=e.parseExifTags(t,d,d+s,r,o))&&!n.disableExifThumbnail&&(l={exif:{}},s=e.parseExifTags(t,d,d+s,r,l),l.exif[513]&&(o.exif.Thumbnail=e.getExifThumbnail(t,d+l.exif[513],l.exif[514]))),o.exif[34665]&&!n.disableExifSub&&e.parseExifTags(t,d,d+o.exif[34665],r,o),o.exif[34853]&&!n.disableExifGps&&e.parseExifTags(t,d,d+o.exif[34853],r,o)):console.log("Invalid Exif data: Missing TIFF marker.")}else console.log("Invalid Exif data: Missing byte alignment offset.")}},e.metaDataParsers.jpeg[65505].push(e.parseExifData)}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-exif"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-exif")):e(window.loadImage)}(function(e){"use strict";e.ExifMap.prototype.tags={256:"ImageWidth",257:"ImageHeight",34665:"ExifIFDPointer",34853:"GPSInfoIFDPointer",40965:"InteroperabilityIFDPointer",258:"BitsPerSample",259:"Compression",262:"PhotometricInterpretation",274:"Orientation",277:"SamplesPerPixel",284:"PlanarConfiguration",530:"YCbCrSubSampling",531:"YCbCrPositioning",282:"XResolution",283:"YResolution",296:"ResolutionUnit",273:"StripOffsets",278:"RowsPerStrip",279:"StripByteCounts",513:"JPEGInterchangeFormat",514:"JPEGInterchangeFormatLength",301:"TransferFunction",318:"WhitePoint",319:"PrimaryChromaticities",529:"YCbCrCoefficients",532:"ReferenceBlackWhite",306:"DateTime",270:"ImageDescription",271:"Make",272:"Model",305:"Software",315:"Artist",33432:"Copyright",36864:"ExifVersion",40960:"FlashpixVersion",40961:"ColorSpace",40962:"PixelXDimension",40963:"PixelYDimension",42240:"Gamma",37121:"ComponentsConfiguration",37122:"CompressedBitsPerPixel",37500:"MakerNote",37510:"UserComment",40964:"RelatedSoundFile",36867:"DateTimeOriginal",36868:"DateTimeDigitized",37520:"SubSecTime",37521:"SubSecTimeOriginal",37522:"SubSecTimeDigitized",33434:"ExposureTime",33437:"FNumber",34850:"ExposureProgram",34852:"SpectralSensitivity",34855:"PhotographicSensiti
vity",34856:"OECF",34864:"SensitivityType",34865:"StandardOutputSensitivity",34866:"RecommendedExposureIndex",34867:"ISOSpeed",34868:"ISOSpeedLatitudeyyy",34869:"ISOSpeedLatitudezzz",37377:"ShutterSpeedValue",37378:"ApertureValue",37379:"BrightnessValue",37380:"ExposureBias",37381:"MaxApertureValue",37382:"SubjectDistance",37383:"MeteringMode",37384:"LightSource",37385:"Flash",37396:"SubjectArea",37386:"FocalLength",41483:"FlashEnergy",41484:"SpatialFrequencyResponse",41486:"FocalPlaneXResolution",41487:"FocalPlaneYResolution",41488:"FocalPlaneResolutionUnit",41492:"SubjectLocation",41493:"ExposureIndex",41495:"SensingMethod",41728:"FileSource",41729:"SceneType",41730:"CFAPattern",41985:"CustomRendered",41986:"ExposureMode",41987:"WhiteBalance",41988:"DigitalZoomRatio",41989:"FocalLengthIn35mmFilm",41990:"SceneCaptureType",41991:"GainControl",41992:"Contrast",41993:"Saturation",41994:"Sharpness",41995:"DeviceSettingDescription",41996:"SubjectDistanceRange",42016:"ImageUniqueID",42032:"CameraOwnerName",42033:"BodySerialNumber",42034:"LensSpecification",42035:"LensMake",42036:"LensModel",42037:"LensSerialNumber",0:"GPSVersionID",1:"GPSLatitudeRef",2:"GPSLatitude",3:"GPSLongitudeRef",4:"GPSLongitude",5:"GPSAltitudeRef",6:"GPSAltitude",7:"GPSTimeStamp",8:"GPSSatellites",9:"GPSStatus",10:"GPSMeasureMode",11:"GPSDOP",12:"GPSSpeedRef",13:"GPSSpeed",14:"GPSTrackRef",15:"GPSTrack",16:"GPSImgDirectionRef",17:"GPSImgDirection",18:"GPSMapDatum",19:"GPSDestLatitudeRef",20:"GPSDestLatitude",21:"GPSDestLongitudeRef",22:"GPSDestLongitude",23:"GPSDestBearingRef",24:"GPSDestBearing",25:"GPSDestDistanceRef",26:"GPSDestDistance",27:"GPSProcessingMethod",28:"GPSAreaInformation",29:"GPSDateStamp",30:"GPSDifferential",31:"GPSHPositioningError"},e.ExifMap.prototype.stringValues={ExposureProgram:{0:"Undefined",1:"Manual",2:"Normal program",3:"Aperture priority",4:"Shutter priority",5:"Creative program",6:"Action program",7:"Portrait mode",8:"Landscape 
mode"},MeteringMode:{0:"Unknown",1:"Average",2:"CenterWeightedAverage",3:"Spot",4:"MultiSpot",5:"Pattern",6:"Partial",255:"Other"},LightSource:{0:"Unknown",1:"Daylight",2:"Fluorescent",3:"Tungsten (incandescent light)",4:"Flash",9:"Fine weather",10:"Cloudy weather",11:"Shade",12:"Daylight fluorescent (D 5700 - 7100K)",13:"Day white fluorescent (N 4600 - 5400K)",14:"Cool white fluorescent (W 3900 - 4500K)",15:"White fluorescent (WW 3200 - 3700K)",17:"Standard light A",18:"Standard light B",19:"Standard light C",20:"D55",21:"D65",22:"D75",23:"D50",24:"ISO studio tungsten",255:"Other"},Flash:{0:"Flash did not fire",1:"Flash fired",5:"Strobe return light not detected",7:"Strobe return light detected",9:"Flash fired, compulsory flash mode",13:"Flash fired, compulsory flash mode, return light not detected",15:"Flash fired, compulsory flash mode, return light detected",16:"Flash did not fire, compulsory flash mode",24:"Flash did not fire, auto mode",25:"Flash fired, auto mode",29:"Flash fired, auto mode, return light not detected",31:"Flash fired, auto mode, return light detected",32:"No flash function",65:"Flash fired, red-eye reduction mode",69:"Flash fired, red-eye reduction mode, return light not detected",71:"Flash fired, red-eye reduction mode, return light detected",73:"Flash fired, compulsory flash mode, red-eye reduction mode",77:"Flash fired, compulsory flash mode, red-eye reduction mode, return light not detected",79:"Flash fired, compulsory flash mode, red-eye reduction mode, return light detected",89:"Flash fired, auto mode, red-eye reduction mode",93:"Flash fired, auto mode, return light not detected, red-eye reduction mode",95:"Flash fired, auto mode, return light detected, red-eye reduction mode"},SensingMethod:{1:"Undefined",2:"One-chip color area sensor",3:"Two-chip color area sensor",4:"Three-chip color area sensor",5:"Color sequential area sensor",7:"Trilinear sensor",8:"Color sequential linear 
sensor"},SceneCaptureType:{0:"Standard",1:"Landscape",2:"Portrait",3:"Night scene"},SceneType:{1:"Directly photographed"},CustomRendered:{0:"Normal process",1:"Custom process"},WhiteBalance:{0:"Auto white balance",1:"Manual white balance"},GainControl:{0:"None",1:"Low gain up",2:"High gain up",3:"Low gain down",4:"High gain down"},Contrast:{0:"Normal",1:"Soft",2:"Hard"},Saturation:{0:"Normal",1:"Low saturation",2:"High saturation"},Sharpness:{0:"Normal",1:"Soft",2:"Hard"},SubjectDistanceRange:{0:"Unknown",1:"Macro",2:"Close view",3:"Distant view"},FileSource:{3:"DSC"},ComponentsConfiguration:{0:"",1:"Y",2:"Cb",3:"Cr",4:"R",5:"G",6:"B"},Orientation:{1:"top-left",2:"top-right",3:"bottom-right",4:"bottom-left",5:"left-top",6:"right-top",7:"right-bottom",8:"left-bottom"}},e.ExifMap.prototype.getText=function(e){var t=this.get(e);switch(e){case"LightSource":case"Flash":case"MeteringMode":case"ExposureProgram":case"SensingMethod":case"SceneCaptureType":case"SceneType":case"CustomRendered":case"WhiteBalance":case"GainControl":case"Contrast":case"Saturation":case"Sharpness":case"SubjectDistanceRange":case"FileSource":case"Orientation":return this.stringValues[e][t];case"ExifVersion":case"FlashpixVersion":if(!t)return;return String.fromCharCode(t[0],t[1],t[2],t[3]);case"ComponentsConfiguration":if(!t)return;return this.stringValues[e][t[0]]+this.stringValues[e][t[1]]+this.stringValues[e][t[2]]+this.stringValues[e][t[3]];case"GPSVersionID":if(!t)return;return t[0]+"."+t[1]+"."+t[2]+"."+t[3]}return String(t)},function(e){var t,i=e.tags,a=e.map;for(t in i)i.hasOwnProperty(t)&&(a[i[t]]=t)}(e.ExifMap.prototype),e.ExifMap.prototype.getAll=function(){var e,t,i={};for(e in this)this.hasOwnProperty(e)&&(t=this.tags[e])&&(i[t]=this.getText(t));return i}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-scale","./load-image-meta"],e):"object"==typeof 
module&&module.exports?e(require("./load-image"),require("./load-image-scale"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";var t=e.hasCanvasOption,i=e.hasMetaOption,a=e.transformCoordinates,o=e.getTransformedOptions;e.hasCanvasOption=function(i){return!!i.orientation||t.call(e,i)},e.hasMetaOption=function(t){return t&&!0===t.orientation||i.call(e,t)},e.transformCoordinates=function(t,i){a.call(e,t,i);var o=t.getContext("2d"),n=t.width,r=t.height,s=t.style.width,l=t.style.height,d=i.orientation;if(d&&!(d>8))switch(d>4&&(t.width=r,t.height=n,t.style.width=l,t.style.height=s),d){case 2:o.translate(n,0),o.scale(-1,1);break;case 3:o.translate(n,r),o.rotate(Math.PI);break;case 4:o.translate(0,r),o.scale(1,-1);break;case 5:o.rotate(.5*Math.PI),o.scale(1,-1);break;case 6:o.rotate(.5*Math.PI),o.translate(0,-r);break;case 7:o.rotate(.5*Math.PI),o.translate(n,-r),o.scale(-1,1);break;case 8:o.rotate(-.5*Math.PI),o.translate(-n,0)}},e.getTransformedOptions=function(t,i,a){var n,r,s=o.call(e,t,i),l=s.orientation;if(!0===l&&a&&a.exif&&(l=a.exif.get("Orientation")),!l||l>8||1===l)return s;n={};for(r in s)s.hasOwnProperty(r)&&(n[r]=s[r]);switch(n.orientation=l,l){case 2:n.left=s.right,n.right=s.left;break;case 3:n.left=s.right,n.top=s.bottom,n.right=s.left,n.bottom=s.top;break;case 4:n.top=s.bottom,n.bottom=s.top;break;case 5:n.left=s.top,n.top=s.left,n.right=s.bottom,n.bottom=s.right;break;case 6:n.left=s.top,n.top=s.right,n.right=s.bottom,n.bottom=s.left;break;case 7:n.left=s.bottom,n.top=s.right,n.right=s.top,n.bottom=s.left;break;case 8:n.left=s.bottom,n.top=s.left,n.right=s.top,n.bottom=s.right}return n.orientation>4&&(n.maxWidth=s.maxHeight,n.maxHeight=s.maxWidth,n.minWidth=s.minHeight,n.minHeight=s.minWidth,n.sourceWidth=s.sourceHeight,n.sourceHeight=s.sourceWidth),n}}); +!function(e){"use strict";function t(e,i,a){var o,n=document.createElement("img");return n.onerror=function(o){return t.onerror(n,o,e,i,a)},n.onload=function(o){return 
t.onload(n,o,e,i,a)},"string"==typeof e?(t.fetchBlob(e,function(i){i?(e=i,o=t.createObjectURL(e)):(o=e,a&&a.crossOrigin&&(n.crossOrigin=a.crossOrigin)),n.src=o},a),n):t.isInstanceOf("Blob",e)||t.isInstanceOf("File",e)?(o=n._objectURL=t.createObjectURL(e))?(n.src=o,n):t.readFile(e,function(e){var t=e.target;t&&t.result?n.src=t.result:i&&i(e)}):void 0}function i(e,i){!e._objectURL||i&&i.noRevoke||(t.revokeObjectURL(e._objectURL),delete e._objectURL)}var a=e.createObjectURL&&e||e.URL&&URL.revokeObjectURL&&URL||e.webkitURL&&webkitURL;t.fetchBlob=function(e,t,i){t()},t.isInstanceOf=function(e,t){return Object.prototype.toString.call(t)==="[object "+e+"]"},t.transform=function(e,t,i,a,o){i(e,o)},t.onerror=function(e,t,a,o,n){i(e,n),o&&o.call(e,t)},t.onload=function(e,a,o,n,r){i(e,r),n&&t.transform(e,r,n,o,{})},t.createObjectURL=function(e){return!!a&&a.createObjectURL(e)},t.revokeObjectURL=function(e){return!!a&&a.revokeObjectURL(e)},t.readFile=function(t,i,a){if(e.FileReader){var o=new FileReader;if(o.onload=o.onerror=i,a=a||"readAsDataURL",o[a])return o[a](t),o}return!1},"function"==typeof define&&define.amd?define(function(){return t}):"object"==typeof module&&module.exports?module.exports=t:e.loadImage=t}("undefined"!=typeof window&&window||this),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image"],e):e("object"==typeof module&&module.exports?require("./load-image"):window.loadImage)}(function(e){"use strict";var t=e.transform;e.transform=function(i,a,o,n,r){t.call(e,e.scale(i,a,r),a,o,n,r)},e.transformCoordinates=function(){},e.getTransformedOptions=function(e,t){var i,a,o,n,r=t.aspectRatio;if(!r)return t;i={};for(a in t)t.hasOwnProperty(a)&&(i[a]=t[a]);return i.crop=!0,o=e.naturalWidth||e.width,n=e.naturalHeight||e.height,o/n>r?(i.maxWidth=n*r,i.maxHeight=n):(i.maxWidth=o,i.maxHeight=o/r),i},e.renderImageToCanvas=function(e,t,i,a,o,n,r,s,l,c){return 
e.getContext("2d").drawImage(t,i,a,o,n,r,s,l,c),e},e.hasCanvasOption=function(e){return e.canvas||e.crop||!!e.aspectRatio},e.scale=function(t,i,a){function o(){var e=Math.max((l||I)/I,(c||v)/v);e>1&&(I*=e,v*=e)}function n(){var e=Math.min((r||I)/I,(s||v)/v);e<1&&(I*=e,v*=e)}i=i||{};var r,s,l,c,d,u,f,g,p,m,h,S=document.createElement("canvas"),b=t.getContext||e.hasCanvasOption(i)&&S.getContext,y=t.naturalWidth||t.width,x=t.naturalHeight||t.height,I=y,v=x;if(b&&(f=(i=e.getTransformedOptions(t,i,a)).left||0,g=i.top||0,i.sourceWidth?(d=i.sourceWidth,void 0!==i.right&&void 0===i.left&&(f=y-d-i.right)):d=y-f-(i.right||0),i.sourceHeight?(u=i.sourceHeight,void 0!==i.bottom&&void 0===i.top&&(g=x-u-i.bottom)):u=x-g-(i.bottom||0),I=d,v=u),r=i.maxWidth,s=i.maxHeight,l=i.minWidth,c=i.minHeight,b&&r&&s&&i.crop?(I=r,v=s,(h=d/u-r/s)<0?(u=s*d/r,void 0===i.top&&void 0===i.bottom&&(g=(x-u)/2)):h>0&&(d=r*u/s,void 0===i.left&&void 0===i.right&&(f=(y-d)/2))):((i.contain||i.cover)&&(l=r=r||l,c=s=s||c),i.cover?(n(),o()):(o(),n())),b){if((p=i.pixelRatio)>1&&(S.style.width=I+"px",S.style.height=v+"px",I*=p,v*=p,S.getContext("2d").scale(p,p)),(m=i.downsamplingRatio)>0&&m<1&&II;)S.width=d*m,S.height=u*m,e.renderImageToCanvas(S,t,f,g,d,u,0,0,S.width,S.height),f=0,g=0,d=S.width,u=S.height,(t=document.createElement("canvas")).width=d,t.height=u,e.renderImageToCanvas(t,S,0,0,d,u,0,0,d,u);return S.width=I,S.height=v,e.transformCoordinates(S,i),e.renderImageToCanvas(S,t,f,g,d,u,0,0,I,v)}return t.width=I,t.height=v,t}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image"],e):e("object"==typeof module&&module.exports?require("./load-image"):window.loadImage)}(function(e){"use strict";var t="undefined"!=typeof 
Blob&&(Blob.prototype.slice||Blob.prototype.webkitSlice||Blob.prototype.mozSlice);e.blobSlice=t&&function(){return(this.slice||this.webkitSlice||this.mozSlice).apply(this,arguments)},e.metaDataParsers={jpeg:{65505:[],65517:[]}},e.parseMetaData=function(t,i,a,o){a=a||{},o=o||{};var n=this,r=a.maxMetaDataSize||262144;!!("undefined"!=typeof DataView&&t&&t.size>=12&&"image/jpeg"===t.type&&e.blobSlice)&&e.readFile(e.blobSlice.call(t,0,r),function(t){if(t.target.error)return console.log(t.target.error),void i(o);var r,s,l,c,d=t.target.result,u=new DataView(d),f=2,g=u.byteLength-4,p=f;if(65496===u.getUint16(0)){for(;f=65504&&r<=65519||65534===r);){if(s=u.getUint16(f+2)+2,f+s>u.byteLength){console.log("Invalid meta data: Invalid segment size.");break}if(l=e.metaDataParsers.jpeg[r])for(c=0;c6&&(d.slice?o.imageHead=d.slice(0,p):o.imageHead=new Uint8Array(d).subarray(0,p))}else console.log("Invalid JPEG file: Missing JPEG marker.");i(o)},"readAsArrayBuffer")||i(o)},e.hasMetaOption=function(e){return e&&e.meta};var i=e.transform;e.transform=function(t,a,o,n,r){e.hasMetaOption(a)?e.parseMetaData(n,function(r){i.call(e,t,a,o,n,r)},a,r):i.apply(e,arguments)}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";"undefined"!=typeof fetch&&"undefined"!=typeof Request&&(e.fetchBlob=function(t,i,a){if(e.hasMetaOption(a))return fetch(new Request(t,a)).then(function(e){return e.blob()}).then(i).catch(function(e){console.log(e),i()});i()})}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-scale","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-scale"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";var 
t=e.hasCanvasOption,i=e.hasMetaOption,a=e.transformCoordinates,o=e.getTransformedOptions;e.hasCanvasOption=function(i){return!!i.orientation||t.call(e,i)},e.hasMetaOption=function(t){return t&&!0===t.orientation||i.call(e,t)},e.transformCoordinates=function(t,i){a.call(e,t,i);var o=t.getContext("2d"),n=t.width,r=t.height,s=t.style.width,l=t.style.height,c=i.orientation;if(c&&!(c>8))switch(c>4&&(t.width=r,t.height=n,t.style.width=l,t.style.height=s),c){case 2:o.translate(n,0),o.scale(-1,1);break;case 3:o.translate(n,r),o.rotate(Math.PI);break;case 4:o.translate(0,r),o.scale(1,-1);break;case 5:o.rotate(.5*Math.PI),o.scale(1,-1);break;case 6:o.rotate(.5*Math.PI),o.translate(0,-r);break;case 7:o.rotate(.5*Math.PI),o.translate(n,-r),o.scale(-1,1);break;case 8:o.rotate(-.5*Math.PI),o.translate(-n,0)}},e.getTransformedOptions=function(t,i,a){var n,r,s=o.call(e,t,i),l=s.orientation;if(!0===l&&a&&a.exif&&(l=a.exif.get("Orientation")),!l||l>8||1===l)return s;n={};for(r in s)s.hasOwnProperty(r)&&(n[r]=s[r]);switch(n.orientation=l,l){case 2:n.left=s.right,n.right=s.left;break;case 3:n.left=s.right,n.top=s.bottom,n.right=s.left,n.bottom=s.top;break;case 4:n.top=s.bottom,n.bottom=s.top;break;case 5:n.left=s.top,n.top=s.left,n.right=s.bottom,n.bottom=s.right;break;case 6:n.left=s.top,n.top=s.right,n.right=s.bottom,n.bottom=s.left;break;case 7:n.left=s.bottom,n.top=s.right,n.right=s.top,n.bottom=s.left;break;case 8:n.left=s.bottom,n.top=s.left,n.right=s.top,n.bottom=s.right}return n.orientation>4&&(n.maxWidth=s.maxHeight,n.maxHeight=s.maxWidth,n.minWidth=s.minHeight,n.minHeight=s.minWidth,n.sourceWidth=s.sourceHeight,n.sourceHeight=s.sourceWidth),n}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";e.ExifMap=function(){return 
this},e.ExifMap.prototype.map={Orientation:274},e.ExifMap.prototype.get=function(e){return this[e]||this[this.map[e]]},e.getExifThumbnail=function(t,i,a){if(a&&!(i+a>t.byteLength))return e.createObjectURL(new Blob([t.buffer.slice(i,i+a)]));console.log("Invalid Exif data: Invalid thumbnail data.")},e.exifTagTypes={1:{getValue:function(e,t){return e.getUint8(t)},size:1},2:{getValue:function(e,t){return String.fromCharCode(e.getUint8(t))},size:1,ascii:!0},3:{getValue:function(e,t,i){return e.getUint16(t,i)},size:2},4:{getValue:function(e,t,i){return e.getUint32(t,i)},size:4},5:{getValue:function(e,t,i){return e.getUint32(t,i)/e.getUint32(t+4,i)},size:8},9:{getValue:function(e,t,i){return e.getInt32(t,i)},size:4},10:{getValue:function(e,t,i){return e.getInt32(t,i)/e.getInt32(t+4,i)},size:8}},e.exifTagTypes[7]=e.exifTagTypes[1],e.getExifValue=function(t,i,a,o,n,r){var s,l,c,d,u,f,g=e.exifTagTypes[o];if(g){if(s=g.size*n,!((l=s>4?i+t.getUint32(a+8,r):a+8)+s>t.byteLength)){if(1===n)return g.getValue(t,l,r);for(c=[],d=0;de.byteLength)console.log("Invalid Exif data: Invalid directory offset.");else{if(n=e.getUint16(i,a),!((r=i+2+12*n)+4>e.byteLength)){for(s=0;st.byteLength)console.log("Invalid Exif data: Invalid segment size.");else if(0===t.getUint16(i+8)){switch(t.getUint16(c)){case 18761:r=!0;break;case 19789:r=!1;break;default:return void console.log("Invalid Exif data: Invalid byte alignment marker.")}42===t.getUint16(c+2,r)?(s=t.getUint32(c+4,r),o.exif=new e.ExifMap,(s=e.parseExifTags(t,c,c+s,r,o))&&!n.disableExifThumbnail&&(l={exif:{}},s=e.parseExifTags(t,c,c+s,r,l),l.exif[513]&&(o.exif.Thumbnail=e.getExifThumbnail(t,c+l.exif[513],l.exif[514]))),o.exif[34665]&&!n.disableExifSub&&e.parseExifTags(t,c,c+o.exif[34665],r,o),o.exif[34853]&&!n.disableExifGps&&e.parseExifTags(t,c,c+o.exif[34853],r,o)):console.log("Invalid Exif data: Missing TIFF marker.")}else console.log("Invalid Exif data: Missing byte alignment 
offset.")}},e.metaDataParsers.jpeg[65505].push(e.parseExifData)}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-exif"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-exif")):e(window.loadImage)}(function(e){"use strict";e.ExifMap.prototype.tags={256:"ImageWidth",257:"ImageHeight",34665:"ExifIFDPointer",34853:"GPSInfoIFDPointer",40965:"InteroperabilityIFDPointer",258:"BitsPerSample",259:"Compression",262:"PhotometricInterpretation",274:"Orientation",277:"SamplesPerPixel",284:"PlanarConfiguration",530:"YCbCrSubSampling",531:"YCbCrPositioning",282:"XResolution",283:"YResolution",296:"ResolutionUnit",273:"StripOffsets",278:"RowsPerStrip",279:"StripByteCounts",513:"JPEGInterchangeFormat",514:"JPEGInterchangeFormatLength",301:"TransferFunction",318:"WhitePoint",319:"PrimaryChromaticities",529:"YCbCrCoefficients",532:"ReferenceBlackWhite",306:"DateTime",270:"ImageDescription",271:"Make",272:"Model",305:"Software",315:"Artist",33432:"Copyright",36864:"ExifVersion",40960:"FlashpixVersion",40961:"ColorSpace",40962:"PixelXDimension",40963:"PixelYDimension",42240:"Gamma",37121:"ComponentsConfiguration",37122:"CompressedBitsPerPixel",37500:"MakerNote",37510:"UserComment",40964:"RelatedSoundFile",36867:"DateTimeOriginal",36868:"DateTimeDigitized",37520:"SubSecTime",37521:"SubSecTimeOriginal",37522:"SubSecTimeDigitized",33434:"ExposureTime",33437:"FNumber",34850:"ExposureProgram",34852:"SpectralSensitivity",34855:"PhotographicSensitivity",34856:"OECF",34864:"SensitivityType",34865:"StandardOutputSensitivity",34866:"RecommendedExposureIndex",34867:"ISOSpeed",34868:"ISOSpeedLatitudeyyy",34869:"ISOSpeedLatitudezzz",37377:"ShutterSpeedValue",37378:"ApertureValue",37379:"BrightnessValue",37380:"ExposureBias",37381:"MaxApertureValue",37382:"SubjectDistance",37383:"MeteringMode",37384:"LightSource",37385:"Flash",37396:"SubjectArea",37386:"FocalLength",41483:"FlashEnergy",41484:"SpatialFreque
ncyResponse",41486:"FocalPlaneXResolution",41487:"FocalPlaneYResolution",41488:"FocalPlaneResolutionUnit",41492:"SubjectLocation",41493:"ExposureIndex",41495:"SensingMethod",41728:"FileSource",41729:"SceneType",41730:"CFAPattern",41985:"CustomRendered",41986:"ExposureMode",41987:"WhiteBalance",41988:"DigitalZoomRatio",41989:"FocalLengthIn35mmFilm",41990:"SceneCaptureType",41991:"GainControl",41992:"Contrast",41993:"Saturation",41994:"Sharpness",41995:"DeviceSettingDescription",41996:"SubjectDistanceRange",42016:"ImageUniqueID",42032:"CameraOwnerName",42033:"BodySerialNumber",42034:"LensSpecification",42035:"LensMake",42036:"LensModel",42037:"LensSerialNumber",0:"GPSVersionID",1:"GPSLatitudeRef",2:"GPSLatitude",3:"GPSLongitudeRef",4:"GPSLongitude",5:"GPSAltitudeRef",6:"GPSAltitude",7:"GPSTimeStamp",8:"GPSSatellites",9:"GPSStatus",10:"GPSMeasureMode",11:"GPSDOP",12:"GPSSpeedRef",13:"GPSSpeed",14:"GPSTrackRef",15:"GPSTrack",16:"GPSImgDirectionRef",17:"GPSImgDirection",18:"GPSMapDatum",19:"GPSDestLatitudeRef",20:"GPSDestLatitude",21:"GPSDestLongitudeRef",22:"GPSDestLongitude",23:"GPSDestBearingRef",24:"GPSDestBearing",25:"GPSDestDistanceRef",26:"GPSDestDistance",27:"GPSProcessingMethod",28:"GPSAreaInformation",29:"GPSDateStamp",30:"GPSDifferential",31:"GPSHPositioningError"},e.ExifMap.prototype.stringValues={ExposureProgram:{0:"Undefined",1:"Manual",2:"Normal program",3:"Aperture priority",4:"Shutter priority",5:"Creative program",6:"Action program",7:"Portrait mode",8:"Landscape mode"},MeteringMode:{0:"Unknown",1:"Average",2:"CenterWeightedAverage",3:"Spot",4:"MultiSpot",5:"Pattern",6:"Partial",255:"Other"},LightSource:{0:"Unknown",1:"Daylight",2:"Fluorescent",3:"Tungsten (incandescent light)",4:"Flash",9:"Fine weather",10:"Cloudy weather",11:"Shade",12:"Daylight fluorescent (D 5700 - 7100K)",13:"Day white fluorescent (N 4600 - 5400K)",14:"Cool white fluorescent (W 3900 - 4500K)",15:"White fluorescent (WW 3200 - 3700K)",17:"Standard light A",18:"Standard light 
B",19:"Standard light C",20:"D55",21:"D65",22:"D75",23:"D50",24:"ISO studio tungsten",255:"Other"},Flash:{0:"Flash did not fire",1:"Flash fired",5:"Strobe return light not detected",7:"Strobe return light detected",9:"Flash fired, compulsory flash mode",13:"Flash fired, compulsory flash mode, return light not detected",15:"Flash fired, compulsory flash mode, return light detected",16:"Flash did not fire, compulsory flash mode",24:"Flash did not fire, auto mode",25:"Flash fired, auto mode",29:"Flash fired, auto mode, return light not detected",31:"Flash fired, auto mode, return light detected",32:"No flash function",65:"Flash fired, red-eye reduction mode",69:"Flash fired, red-eye reduction mode, return light not detected",71:"Flash fired, red-eye reduction mode, return light detected",73:"Flash fired, compulsory flash mode, red-eye reduction mode",77:"Flash fired, compulsory flash mode, red-eye reduction mode, return light not detected",79:"Flash fired, compulsory flash mode, red-eye reduction mode, return light detected",89:"Flash fired, auto mode, red-eye reduction mode",93:"Flash fired, auto mode, return light not detected, red-eye reduction mode",95:"Flash fired, auto mode, return light detected, red-eye reduction mode"},SensingMethod:{1:"Undefined",2:"One-chip color area sensor",3:"Two-chip color area sensor",4:"Three-chip color area sensor",5:"Color sequential area sensor",7:"Trilinear sensor",8:"Color sequential linear sensor"},SceneCaptureType:{0:"Standard",1:"Landscape",2:"Portrait",3:"Night scene"},SceneType:{1:"Directly photographed"},CustomRendered:{0:"Normal process",1:"Custom process"},WhiteBalance:{0:"Auto white balance",1:"Manual white balance"},GainControl:{0:"None",1:"Low gain up",2:"High gain up",3:"Low gain down",4:"High gain down"},Contrast:{0:"Normal",1:"Soft",2:"Hard"},Saturation:{0:"Normal",1:"Low saturation",2:"High saturation"},Sharpness:{0:"Normal",1:"Soft",2:"Hard"},SubjectDistanceRange:{0:"Unknown",1:"Macro",2:"Close view",3:"Distant 
view"},FileSource:{3:"DSC"},ComponentsConfiguration:{0:"",1:"Y",2:"Cb",3:"Cr",4:"R",5:"G",6:"B"},Orientation:{1:"top-left",2:"top-right",3:"bottom-right",4:"bottom-left",5:"left-top",6:"right-top",7:"right-bottom",8:"left-bottom"}},e.ExifMap.prototype.getText=function(e){var t=this.get(e);switch(e){case"LightSource":case"Flash":case"MeteringMode":case"ExposureProgram":case"SensingMethod":case"SceneCaptureType":case"SceneType":case"CustomRendered":case"WhiteBalance":case"GainControl":case"Contrast":case"Saturation":case"Sharpness":case"SubjectDistanceRange":case"FileSource":case"Orientation":return this.stringValues[e][t];case"ExifVersion":case"FlashpixVersion":if(!t)return;return String.fromCharCode(t[0],t[1],t[2],t[3]);case"ComponentsConfiguration":if(!t)return;return this.stringValues[e][t[0]]+this.stringValues[e][t[1]]+this.stringValues[e][t[2]]+this.stringValues[e][t[3]];case"GPSVersionID":if(!t)return;return t[0]+"."+t[1]+"."+t[2]+"."+t[3]}return String(t)},function(e){var t,i=e.tags,a=e.map;for(t in i)i.hasOwnProperty(t)&&(a[i[t]]=t)}(e.ExifMap.prototype),e.ExifMap.prototype.getAll=function(){var e,t,i={};for(e in this)this.hasOwnProperty(e)&&(t=this.tags[e])&&(i[t]=this.getText(t));return i}}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-meta"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-meta")):e(window.loadImage)}(function(e){"use strict";e.IptcMap=function(){return this},e.IptcMap.prototype.map={ObjectName:5},e.IptcMap.prototype.get=function(e){return this[e]||this[this.map[e]]},e.parseIptcTags=function(e,t,i,a){for(var o,n,r=t;rr){console.log("Invalid IPTC data: Invalid segment offset.");break}var c=t.getUint16(i+6+s);if(i+c>r){console.log("Invalid IPTC data: Invalid segment size.");break}return o.iptc=new e.IptcMap,e.parseIptcTags(t,l,c,o)}i++}console.log("No IPTC data at this offset - could be 
XMP")}},e.metaDataParsers.jpeg[65517].push(e.parseIptcData)}),function(e){"use strict";"function"==typeof define&&define.amd?define(["./load-image","./load-image-iptc"],e):"object"==typeof module&&module.exports?e(require("./load-image"),require("./load-image-iptc")):e(window.loadImage)}(function(e){"use strict";e.IptcMap.prototype.tags={3:"ObjectType",4:"ObjectAttribute",5:"ObjectName",7:"EditStatus",8:"EditorialUpdate",10:"Urgency",12:"SubjectRef",15:"Category",20:"SupplCategory",22:"FixtureID",25:"Keywords",26:"ContentLocCode",27:"ContentLocName",30:"ReleaseDate",35:"ReleaseTime",37:"ExpirationDate",38:"ExpirationTime",40:"SpecialInstructions",42:"ActionAdvised",45:"RefService",47:"RefDate",50:"RefNumber",55:"DateCreated",60:"TimeCreated",62:"DigitalCreationDate",63:"DigitalCreationTime",65:"OriginatingProgram",70:"ProgramVersion",75:"ObjectCycle",80:"Byline",85:"BylineTitle",90:"City",92:"Sublocation",95:"State",100:"CountryCode",101:"CountryName",103:"OrigTransRef",105:"Headline",110:"Credit",115:"Source",116:"CopyrightNotice",118:"Contact",120:"Caption",122:"WriterEditor",130:"ImageType",131:"ImageOrientation",135:"LanguageID"},e.IptcMap.prototype.getText=function(e){var t=this.get(e);return String(t)},function(e){var t,i=e.tags,a=e.map||{};for(t in i)i.hasOwnProperty(t)&&(a[i[t]]=t)}(e.IptcMap.prototype),e.IptcMap.prototype.getAll=function(){var e,t,i={};for(e in this)this.hasOwnProperty(e)&&(t=this.tags[e])&&(i[t]=this.getText(t));return i}}); //# sourceMappingURL=load-image.all.min.js.map \ No newline at end of file diff --git a/lib/cloudinary/templatetags/cloudinary.py b/lib/cloudinary/templatetags/cloudinary.py index febe1e2f..0a669053 100644 --- a/lib/cloudinary/templatetags/cloudinary.py +++ b/lib/cloudinary/templatetags/cloudinary.py @@ -2,15 +2,14 @@ from __future__ import absolute_import import json +import cloudinary +from cloudinary import CloudinaryResource, utils +from cloudinary.compat import PY3 +from cloudinary.forms import 
CloudinaryJsFileField, cl_init_js_callbacks from django import template from django.forms import Form from django.utils.safestring import mark_safe -import cloudinary -from cloudinary import CloudinaryResource, utils, uploader -from cloudinary.forms import CloudinaryJsFileField, cl_init_js_callbacks -from cloudinary.compat import PY3 - register = template.Library() @@ -57,9 +56,9 @@ def cloudinary_direct_upload_field(field_name="image", request=None): return value -"""Deprecated - please use cloudinary_direct_upload_field, or a proper form""" @register.inclusion_tag('cloudinary_direct_upload.html') def cloudinary_direct_upload(callback_url, **options): + """Deprecated - please use cloudinary_direct_upload_field, or a proper form""" params = utils.build_upload_params(callback=callback_url, **options) params = utils.sign_request(params, options) @@ -75,6 +74,8 @@ def cloudinary_includes(processing=False): CLOUDINARY_JS_CONFIG_PARAMS = ("api_key", "cloud_name", "private_cdn", "secure_distribution", "cdn_subdomain") + + @register.inclusion_tag('cloudinary_js_config.html') def cloudinary_js_config(): config = cloudinary.config() diff --git a/lib/cloudinary/uploader.py b/lib/cloudinary/uploader.py index 4230f393..a396645c 100644 --- a/lib/cloudinary/uploader.py +++ b/lib/cloudinary/uploader.py @@ -1,17 +1,17 @@ # Copyright Cloudinary import json -import re +import os import socket -from os.path import getsize + +import certifi +from six import string_types +from urllib3 import PoolManager, ProxyManager +from urllib3.exceptions import HTTPError import cloudinary -import urllib3 -import certifi from cloudinary import utils -from cloudinary.api import Error -from cloudinary.compat import string_types -from urllib3.exceptions import HTTPError -from urllib3 import PoolManager +from cloudinary.exceptions import Error +from cloudinary.cache.responsive_breakpoints_cache import instance as responsive_breakpoints_cache_instance try: from urllib3.contrib.appengine import 
AppEngineManager, is_appengine_sandbox @@ -29,15 +29,21 @@ if is_appengine_sandbox(): _http = AppEngineManager() else: # PoolManager uses a socket-level API behind the scenes - _http = PoolManager( - cert_reqs='CERT_REQUIRED', - ca_certs=certifi.where() - ) + _http = utils.get_http_connector(cloudinary.config(), cloudinary.CERT_KWARGS) + +upload_options = [ + "filename", + "timeout", + "chunk_size", + "use_cache" +] + +UPLOAD_LARGE_CHUNK_SIZE = 20000000 def upload(file, **options): params = utils.build_upload_params(**options) - return call_api("upload", params, file=file, **options) + return call_cacheable_api("upload", params, file=file, **options) def unsigned_upload(file, upload_preset, **options): @@ -55,35 +61,56 @@ def upload_resource(file, **options): result = upload(file, **options) return cloudinary.CloudinaryResource( result["public_id"], version=str(result["version"]), - format=result.get("format"), type=result["type"], resource_type=result["resource_type"], metadata=result) + format=result.get("format"), type=result["type"], + resource_type=result["resource_type"], metadata=result) def upload_large(file, **options): """ Upload large files. 
""" - upload_id = utils.random_public_id() - with open(file, 'rb') as file_io: - results = None - current_loc = 0 - chunk_size = options.get("chunk_size", 20000000) - file_size = getsize(file) - chunk = file_io.read(chunk_size) - while chunk: - range = "bytes {0}-{1}/{2}".format(current_loc, current_loc + len(chunk) - 1, file_size) - current_loc += len(chunk) + if utils.is_remote_url(file): + return upload(file, **options) + + if hasattr(file, 'read') and callable(file.read): + file_io = file + else: + file_io = open(file, 'rb') + + upload_result = None + + with file_io: + upload_id = utils.random_public_id() + current_loc = 0 + chunk_size = options.get("chunk_size", UPLOAD_LARGE_CHUNK_SIZE) + file_size = utils.file_io_size(file_io) + + file_name = options.get( + "filename", + file_io.name if hasattr(file_io, 'name') and isinstance(file_io.name, str) else "stream") + + chunk = file_io.read(chunk_size) + + while chunk: + content_range = "bytes {0}-{1}/{2}".format(current_loc, current_loc + len(chunk) - 1, file_size) + current_loc += len(chunk) + http_headers = {"Content-Range": content_range, "X-Unique-Upload-Id": upload_id} + + upload_result = upload_large_part((file_name, chunk), http_headers=http_headers, **options) + + options["public_id"] = upload_result.get("public_id") - results = upload_large_part((file, chunk), - http_headers={"Content-Range": range, "X-Unique-Upload-Id": upload_id}, - **options) - options["public_id"] = results.get("public_id") chunk = file_io.read(chunk_size) - return results + + return upload_result def upload_large_part(file, **options): """ Upload large files. 
""" params = utils.build_upload_params(**options) - if 'resource_type' not in options: options['resource_type'] = "raw" - return call_api("upload", params, file=file, **options) + + if 'resource_type' not in options: + options['resource_type'] = "raw" + + return call_cacheable_api("upload", params, file=file, **options) def destroy(public_id, **options): @@ -91,7 +118,7 @@ def destroy(public_id, **options): "timestamp": utils.now(), "type": options.get("type"), "invalidate": options.get("invalidate"), - "public_id": public_id + "public_id": public_id } return call_api("destroy", params, **options) @@ -103,15 +130,43 @@ def rename(from_public_id, to_public_id, **options): "overwrite": options.get("overwrite"), "invalidate": options.get("invalidate"), "from_public_id": from_public_id, - "to_public_id": to_public_id + "to_public_id": to_public_id, + "to_type": options.get("to_type") } return call_api("rename", params, **options) +def update_metadata(metadata, public_ids, **options): + """ + Populates metadata fields with the given values. Existing values will be overwritten. + + Any metadata-value pairs given are merged with any existing metadata-value pairs + (an empty value for an existing metadata field clears the value) + + :param metadata: A list of custom metadata fields (by external_id) and the values to assign to each + of them. + :param public_ids: An array of Public IDs of assets uploaded to Cloudinary. + :param options: Options such as + *resource_type* (the type of file. Default: image. Valid values: image, raw, or video) and + *type* (The storage type. Default: upload. Valid values: upload, private, or authenticated.) 
+ + :return: A list of public IDs that were updated + :rtype: mixed + """ + params = { + "timestamp": utils.now(), + "metadata": utils.encode_context(metadata), + "public_ids": utils.build_array(public_ids), + "type": options.get("type") + } + + return call_api("metadata", params, **options) + + def explicit(public_id, **options): params = utils.build_upload_params(**options) params["public_id"] = public_id - return call_api("explicit", params, **options) + return call_cacheable_api("explicit", params, **options) def create_archive(**options): @@ -131,7 +186,8 @@ def generate_sprite(tag, **options): "tag": tag, "async": options.get("async"), "notification_url": options.get("notification_url"), - "transformation": utils.generate_transformation_string(fetch_format=options.get("format"), **options)[0] + "transformation": utils.generate_transformation_string( + fetch_format=options.get("format"), **options)[0] } return call_api("sprite", params, **options) @@ -177,8 +233,10 @@ def replace_tag(tag, public_ids=None, **options): def remove_all_tags(public_ids, **options): """ Remove all tags from the specified public IDs. + :param public_ids: the public IDs of the resources to update :param options: additional options passed to the request + :return: dictionary with a list of public IDs that were updated """ return call_tags_api(None, "remove_all", public_ids, **options) @@ -187,9 +245,11 @@ def remove_all_tags(public_ids, **options): def add_context(context, public_ids, **options): """ Add a context keys and values. If a particular key already exists, the value associated with the key is updated. 
+ :param context: dictionary of context :param public_ids: the public IDs of the resources to update :param options: additional options passed to the request + :return: dictionary with a list of public IDs that were updated """ return call_context_api(context, "add", public_ids, **options) @@ -198,8 +258,10 @@ def add_context(context, public_ids, **options): def remove_all_context(public_ids, **options): """ Remove all custom context from the specified public IDs. + :param public_ids: the public IDs of the resources to update :param options: additional options passed to the request + :return: dictionary with a list of public IDs that were updated """ return call_context_api(None, "remove_all", public_ids, **options) @@ -227,17 +289,18 @@ def call_context_api(context, command, public_ids=None, **options): return call_api("context", params, **options) -TEXT_PARAMS = ["public_id", - "font_family", - "font_size", - "font_color", - "text_align", - "font_weight", - "font_style", - "background", - "opacity", - "text_decoration" - ] +TEXT_PARAMS = [ + "public_id", + "font_family", + "font_size", + "font_color", + "text_align", + "font_weight", + "font_style", + "background", + "opacity", + "text_decoration" +] def text(text, **options): @@ -247,6 +310,42 @@ def text(text, **options): return call_api("text", params, **options) +def _save_responsive_breakpoints_to_cache(result): + """ + Saves responsive breakpoints parsed from upload result to cache + + :param result: Upload result + """ + if "responsive_breakpoints" not in result: + return + + if "public_id" not in result: + # We have some faulty result, nothing to cache + return + + options = dict((k, result[k]) for k in ["type", "resource_type"] if k in result) + + for transformation in result.get("responsive_breakpoints", []): + options["raw_transformation"] = transformation.get("transformation", "") + options["format"] = os.path.splitext(transformation["breakpoints"][0]["url"])[1][1:] + breakpoints = [bp["width"] for bp 
in transformation["breakpoints"]] + responsive_breakpoints_cache_instance.set(result["public_id"], breakpoints, **options) + + +def call_cacheable_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, + **options): + """ + Calls Upload API and saves results to cache (if enabled) + """ + + result = call_api(action, params, http_headers, return_error, unsigned, file, timeout, **options) + + if "use_cache" in options or cloudinary.config().use_cache: + _save_responsive_breakpoints_to_cache(result) + + return result + + def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, **options): if http_headers is None: http_headers = {} @@ -267,26 +366,27 @@ def call_api(action, params, http_headers=None, return_error=False, unsigned=Fal api_url = utils.cloudinary_api_url(action, **options) if file: + filename = options.get("filename") # Custom filename provided by user (relevant only for streams and files) + if isinstance(file, string_types): - if re.match(r'ftp:|https?:|s3:|data:[^;]*;base64,([a-zA-Z0-9\/+\n=]+)$', file): + if utils.is_remote_url(file): # URL name = None data = file else: # file path - name = file + name = filename or file with open(file, "rb") as opened: data = opened.read() elif hasattr(file, 'read') and callable(file.read): # stream data = file.read() - name = file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream" + name = filename or (file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream") elif isinstance(file, tuple): - name = None - data = file + name, data = file else: # Not a string, not a stream - name = "file" + name = filename or "file" data = file param_list["file"] = (name, data) if name else data @@ -310,16 +410,17 @@ def call_api(action, params, http_headers=None, return_error=False, unsigned=Fal result = json.loads(response.data.decode('utf-8')) except Exception as e: # Error is parsing json - 
raise Error("Error parsing server response (%d) - %s. Got - %s", response.status, response, e) + raise Error("Error parsing server response (%d) - %s. Got - %s" % (response.status, response.data, e)) if "error" in result: if response.status not in [200, 400, 401, 403, 404, 500]: code = response.status if return_error: - result["error"]["http_code"] = code + result["error"]["http_code"] = code else: raise Error(result["error"]["message"]) return result finally: - if file_io: file_io.close() + if file_io: + file_io.close() diff --git a/lib/cloudinary/utils.py b/lib/cloudinary/utils.py index 507bdad4..872a7588 100644 --- a/lib/cloudinary/utils.py +++ b/lib/cloudinary/utils.py @@ -3,15 +3,19 @@ import base64 import copy import hashlib import json +import os import random import re import string import struct import time +import urllib import zlib from collections import OrderedDict from datetime import datetime, date from fractions import Fraction +from numbers import Number +from urllib3 import ProxyManager, PoolManager import six.moves.urllib.parse from six import iteritems @@ -33,12 +37,101 @@ DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION = {"width": "auto", "crop": "limit"} RANGE_VALUE_RE = r'^(?P(\d+\.)?\d+)(?P[%pP])?$' RANGE_RE = r'^(\d+\.)?\d+[%pP]?\.\.(\d+\.)?\d+[%pP]?$' FLOAT_RE = r'^(\d+)\.(\d+)?$' +REMOTE_URL_RE = r'ftp:|https?:|s3:|gs:|data:([\w-]+\/[\w-]+)?(;[\w-]+=[\w-]+)*;base64,([a-zA-Z0-9\/+\n=]+)$' __LAYER_KEYWORD_PARAMS = [("font_weight", "normal"), ("font_style", "normal"), ("text_decoration", "none"), ("text_align", None), ("stroke", "none")] +# a list of keys used by the cloudinary_url function +__URL_KEYS = [ + 'api_secret', + 'auth_token', + 'cdn_subdomain', + 'cloud_name', + 'cname', + 'format', + 'private_cdn', + 'resource_type', + 'secure', + 'secure_cdn_subdomain', + 'secure_distribution', + 'shorten', + 'sign_url', + 'ssl_detected', + 'type', + 'url_suffix', + 'use_root_path', + 'version' +] + +__SIMPLE_UPLOAD_PARAMS = [ + "public_id", + 
"callback", + "format", + "type", + "backup", + "faces", + "image_metadata", + "exif", + "colors", + "use_filename", + "unique_filename", + "discard_original_filename", + "invalidate", + "notification_url", + "eager_notification_url", + "eager_async", + "proxy", + "folder", + "overwrite", + "moderation", + "raw_convert", + "quality_override", + "quality_analysis", + "ocr", + "categorization", + "detection", + "similarity_search", + "background_removal", + "upload_preset", + "phash", + "return_delete_token", + "auto_tagging", + "async", + "cinemagraph_analysis", +] + +__SERIALIZED_UPLOAD_PARAMS = [ + "timestamp", + "transformation", + "headers", + "eager", + "tags", + "allowed_formats", + "face_coordinates", + "custom_coordinates", + "context", + "auto_tagging", + "responsive_breakpoints", + "access_control", + "metadata", +] + +upload_params = __SIMPLE_UPLOAD_PARAMS + __SERIALIZED_UPLOAD_PARAMS + + +def compute_hex_hash(s): + """ + Compute hash and convert the result to HEX string + + :param s: string to process + + :return: HEX string + """ + return hashlib.sha1(to_bytes(s)).hexdigest() + def build_array(arg): if isinstance(arg, list): @@ -133,6 +226,22 @@ def json_encode(value): return json.dumps(value, default=__json_serializer, separators=(',', ':')) +def patch_fetch_format(options): + """ + When upload type is fetch, remove the format options. + In addition, set the fetch_format options to the format value unless it was already set. + Mutates the options parameter! 
+ + :param options: URL and transformation options + """ + if options.get("type", "upload") != "fetch": + return + + resource_format = options.pop("format", None) + if "fetch_format" not in options: + options["fetch_format"] = resource_format + + def generate_transformation_string(**options): responsive_width = options.pop("responsive_width", cloudinary.config().responsive_width) size = options.pop("size", None) @@ -165,6 +274,7 @@ def generate_transformation_string(**options): return generate_transformation_string(**bs)[0] else: return generate_transformation_string(transformation=bs)[0] + base_transformations = list(map(recurse, base_transformations)) named_transformation = None else: @@ -186,11 +296,11 @@ def generate_transformation_string(**options): flags = ".".join(build_array(options.pop("flags", None))) dpr = options.pop("dpr", cloudinary.config().dpr) duration = norm_range_value(options.pop("duration", None)) - start_offset = norm_range_value(options.pop("start_offset", None)) + start_offset = norm_auto_range_value(options.pop("start_offset", None)) end_offset = norm_range_value(options.pop("end_offset", None)) offset = split_range(options.pop("offset", None)) if offset: - start_offset = norm_range_value(offset[0]) + start_offset = norm_auto_range_value(offset[0]) end_offset = norm_range_value(offset[1]) video_codec = process_video_codec_param(options.pop("video_codec", None)) @@ -202,6 +312,9 @@ def generate_transformation_string(**options): overlay = process_layer(options.pop("overlay", None), "overlay") underlay = process_layer(options.pop("underlay", None), "underlay") if_value = process_conditional(options.pop("if", None)) + custom_function = process_custom_function(options.pop("custom_function", None)) + custom_pre_function = process_custom_pre_function(options.pop("custom_pre_function", None)) + fps = process_fps(options.pop("fps", None)) params = { "a": normalize_expression(angle), @@ -215,19 +328,22 @@ def 
generate_transformation_string(**options): "e": normalize_expression(effect), "eo": normalize_expression(end_offset), "fl": flags, + "fn": custom_function or custom_pre_function, + "fps": fps, "h": normalize_expression(height), + "ki": process_ki(options.pop("keyframe_interval", None)), "l": overlay, - "o": normalize_expression(options.pop('opacity',None)), - "q": normalize_expression(options.pop('quality',None)), - "r": normalize_expression(options.pop('radius',None)), + "o": normalize_expression(options.pop('opacity', None)), + "q": normalize_expression(options.pop('quality', None)), + "r": process_radius(options.pop('radius', None)), "so": normalize_expression(start_offset), "t": named_transformation, "u": underlay, "w": normalize_expression(width), - "x": normalize_expression(options.pop('x',None)), - "y": normalize_expression(options.pop('y',None)), + "x": normalize_expression(options.pop('x', None)), + "y": normalize_expression(options.pop('y', None)), "vc": video_codec, - "z": normalize_expression(options.pop('zoom',None)) + "z": normalize_expression(options.pop('zoom', None)) } simple_params = { "ac": "audio_codec", @@ -239,7 +355,6 @@ def generate_transformation_string(**options): "dn": "density", "f": "fetch_format", "g": "gravity", - "ki": "keyframe_interval", "p": "prefix", "pg": "page", "sp": "streaming_profile", @@ -249,9 +364,9 @@ def generate_transformation_string(**options): for param, option in simple_params.items(): params[param] = options.pop(option, None) - variables = options.pop('variables',{}) + variables = options.pop('variables', {}) var_params = [] - for key,value in options.items(): + for key, value in options.items(): if re.match(r'^\$', key): var_params.append(u"{0}_{1}".format(key, normalize_expression(str(value)))) @@ -261,7 +376,6 @@ def generate_transformation_string(**options): for var in variables: var_params.append(u"{0}_{1}".format(var[0], normalize_expression(str(var[1])))) - variables = ','.join(var_params) sorted_params = 
sorted([param + "_" + str(value) for param, value in params.items() if (value or value == 0)]) @@ -270,10 +384,14 @@ def generate_transformation_string(**options): if if_value is not None: sorted_params.insert(0, "if_" + str(if_value)) + + if "raw_transformation" in options and (options["raw_transformation"] or options["raw_transformation"] == 0): + sorted_params.append(options.pop("raw_transformation")) + transformation = ",".join(sorted_params) - if "raw_transformation" in options: - transformation = transformation + "," + options.pop("raw_transformation") + transformations = base_transformations + [transformation] + if responsive_width: responsive_width_transformation = cloudinary.config().responsive_width_transformation \ or DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION @@ -287,6 +405,31 @@ def generate_transformation_string(**options): return url, options +def chain_transformations(options, transformations): + """ + Helper function, allows chaining transformations to the end of transformations list + + The result of this function is an updated options parameter + + :param options: Original options + :param transformations: Transformations to chain at the end + + :return: Resulting options + """ + + transformations = copy.deepcopy(transformations) + + transformations = build_array(transformations) + # preserve url options + url_options = dict((o, options[o]) for o in __URL_KEYS if o in options) + + transformations.insert(0, options) + + url_options["transformation"] = transformations + + return url_options + + def is_fraction(width): width = str(width) return re.match(FLOAT_RE, width) and float(width) < 1 @@ -302,18 +445,26 @@ def split_range(range): def norm_range_value(value): - if value is None: return None + if value is None: + return None match = re.match(RANGE_VALUE_RE, str(value)) - if match is None: return None + if match is None: + return None modifier = '' if match.group('modifier') is not None: - modifier = 'p' + modifier = 'p' return match.group('value') 
+ modifier +def norm_auto_range_value(value): + if value == "auto": + return value + return norm_range_value(value) + + def process_video_codec_param(param): out_param = param if isinstance(out_param, dict): @@ -325,15 +476,29 @@ def process_video_codec_param(param): return out_param +def process_radius(param): + if param is None: + return + + if isinstance(param, (list, tuple)): + if not 1 <= len(param) <= 4: + raise ValueError("Invalid radius param") + return ':'.join(normalize_expression(t) for t in param) + + return str(param) + + def cleanup_params(params): return dict([(k, __safe_value(v)) for (k, v) in params.items() if v is not None and not v == ""]) def sign_request(params, options): api_key = options.get("api_key", cloudinary.config().api_key) - if not api_key: raise ValueError("Must supply api_key") + if not api_key: + raise ValueError("Must supply api_key") api_secret = options.get("api_secret", cloudinary.config().api_secret) - if not api_secret: raise ValueError("Must supply api_secret") + if not api_secret: + raise ValueError("Must supply api_secret") params = cleanup_params(params) params["signature"] = api_sign_request(params, api_secret) @@ -345,7 +510,7 @@ def sign_request(params, options): def api_sign_request(params_to_sign, api_secret): params = [(k + "=" + (",".join(v) if isinstance(v, list) else str(v))) for k, v in params_to_sign.items() if v] to_sign = "&".join(sorted(params)) - return hashlib.sha1(to_bytes(to_sign + api_secret)).hexdigest() + return compute_hex_hash(to_sign + api_secret) def breakpoint_settings_mapper(breakpoint_settings): @@ -370,11 +535,13 @@ def finalize_source(source, format, url_suffix): source_to_sign = source else: source = unquote(source) - if not PY3: source = source.encode('utf8') + if not PY3: + source = source.encode('utf8') source = smart_escape(source) source_to_sign = source if url_suffix is not None: - if re.search(r'[\./]', url_suffix): raise ValueError("url_suffix should not include . 
or /") + if re.search(r'[\./]', url_suffix): + raise ValueError("url_suffix should not include . or /") source = source + "/" + url_suffix if format is not None: source = source + "." + format @@ -396,7 +563,8 @@ def finalize_resource_type(resource_type, type, url_suffix, use_root_path, short raise ValueError("URL Suffix only supported for image/upload and raw/upload") if use_root_path: - if (resource_type == "image" and upload_type == "upload") or (resource_type == "images" and upload_type is None): + if (resource_type == "image" and upload_type == "upload") or ( + resource_type == "images" and upload_type is None): resource_type = None upload_type = None else: @@ -409,28 +577,33 @@ def finalize_resource_type(resource_type, type, url_suffix, use_root_path, short return resource_type, upload_type -def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname, secure, - secure_distribution): +def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, + secure_cdn_subdomain, cname, secure, secure_distribution): """cdn_subdomain and secure_cdn_subdomain 1) Customers in shared distribution (e.g. res.cloudinary.com) - if cdn_domain is true uses res-[1-5].cloudinary.com for both http and https. Setting secure_cdn_subdomain to false disables this for https. + if cdn_domain is true uses res-[1-5].cloudinary.com for both http and https. + Setting secure_cdn_subdomain to false disables this for https. 2) Customers with private cdn if cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for http - if secure_cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for https (please contact support if you require this) + if secure_cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for https + (please contact support if you require this) 3) Customers with cname - if cdn_domain is true uses a[1-5].cname for http. 
For https, uses the same naming scheme as 1 for shared distribution and as 2 for private distribution.""" + if cdn_domain is true uses a[1-5].cname for http. For https, uses the same naming scheme + as 1 for shared distribution and as 2 for private distribution.""" shared_domain = not private_cdn shard = __crc(source) if secure: if secure_distribution is None or secure_distribution == cloudinary.OLD_AKAMAI_SHARED_CDN: - secure_distribution = cloud_name + "-res.cloudinary.com" if private_cdn else cloudinary.SHARED_CDN + secure_distribution = cloud_name + "-res.cloudinary.com" \ + if private_cdn else cloudinary.SHARED_CDN shared_domain = shared_domain or secure_distribution == cloudinary.SHARED_CDN if secure_cdn_subdomain is None and shared_domain: secure_cdn_subdomain = cdn_subdomain if secure_cdn_subdomain: - secure_distribution = re.sub('res.cloudinary.com', "res-" + shard + ".cloudinary.com", secure_distribution) + secure_distribution = re.sub('res.cloudinary.com', "res-" + shard + ".cloudinary.com", + secure_distribution) prefix = "https://" + secure_distribution elif cname: @@ -438,10 +611,12 @@ def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, prefix = "http://" + subdomain + cname else: subdomain = cloud_name + "-res" if private_cdn else "res" - if cdn_subdomain: subdomain = subdomain + "-" + shard + if cdn_subdomain: + subdomain = subdomain + "-" + shard prefix = "http://" + subdomain + ".cloudinary.com" - if shared_domain: prefix += "/" + cloud_name + if shared_domain: + prefix += "/" + cloud_name return prefix @@ -460,16 +635,23 @@ def merge(*dict_args): def cloudinary_url(source, **options): original_source = source + patch_fetch_format(options) type = options.pop("type", "upload") - if type == 'fetch': - options["fetch_format"] = options.get("fetch_format", options.pop("format", None)) + transformation, options = generate_transformation_string(**options) resource_type = options.pop("resource_type", "image") + + 
force_version = options.pop("force_version", cloudinary.config().force_version) + if force_version is None: + force_version = True + version = options.pop("version", None) + format = options.pop("format", None) cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain) - secure_cdn_subdomain = options.pop("secure_cdn_subdomain", cloudinary.config().secure_cdn_subdomain) + secure_cdn_subdomain = options.pop("secure_cdn_subdomain", + cloudinary.config().secure_cdn_subdomain) cname = options.pop("cname", cloudinary.config().cname) shorten = options.pop("shorten", cloudinary.config().shorten) @@ -478,7 +660,8 @@ def cloudinary_url(source, **options): raise ValueError("Must supply cloud_name in tag or in configuration") secure = options.pop("secure", cloudinary.config().secure) private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn) - secure_distribution = options.pop("secure_distribution", cloudinary.config().secure_distribution) + secure_distribution = options.pop("secure_distribution", + cloudinary.config().secure_distribution) sign_url = options.pop("sign_url", cloudinary.config().sign_url) api_secret = options.pop("api_secret", cloudinary.config().api_secret) url_suffix = options.pop("url_suffix", None) @@ -490,15 +673,19 @@ def cloudinary_url(source, **options): if (not source) or type == "upload" and re.match(r'^https?:', source): return original_source, options - resource_type, type = finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten) + resource_type, type = finalize_resource_type( + resource_type, type, url_suffix, use_root_path, shorten) source, source_to_sign = finalize_source(source, format, url_suffix) - if source_to_sign.find("/") >= 0 \ + if not version and force_version \ + and source_to_sign.find("/") >= 0 \ and not re.match(r'^https?:/', source_to_sign) \ - and not re.match(r'^v[0-9]+', source_to_sign) \ - and not version: + and not re.match(r'^v[0-9]+', source_to_sign): version = "1" 
- if version: version = "v" + str(version) + if version: + version = "v" + str(version) + else: + version = None transformation = re.sub(r'([^:])/+', r'\1/', transformation) @@ -506,35 +693,84 @@ def cloudinary_url(source, **options): if sign_url and not auth_token: to_sign = "/".join(__compact([transformation, source_to_sign])) signature = "s--" + to_string( - base64.urlsafe_b64encode(hashlib.sha1(to_bytes(to_sign + api_secret)).digest())[0:8]) + "--" + base64.urlsafe_b64encode( + hashlib.sha1(to_bytes(to_sign + api_secret)).digest())[0:8]) + "--" - prefix = unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname, - secure, secure_distribution) - source = "/".join(__compact([prefix, resource_type, type, signature, transformation, version, source])) + prefix = unsigned_download_url_prefix( + source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, + cname, secure, secure_distribution) + source = "/".join(__compact( + [prefix, resource_type, type, signature, transformation, version, source])) if sign_url and auth_token: path = urlparse(source).path - token = cloudinary.auth_token.generate( **merge(auth_token, {"url": path})) + token = cloudinary.auth_token.generate(**merge(auth_token, {"url": path})) source = "%s?%s" % (source, token) return source, options def cloudinary_api_url(action='upload', **options): - cloudinary_prefix = options.get("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com" + cloudinary_prefix = options.get("upload_prefix", cloudinary.config().upload_prefix)\ + or "https://api.cloudinary.com" cloud_name = options.get("cloud_name", cloudinary.config().cloud_name) - if not cloud_name: raise ValueError("Must supply cloud_name") + if not cloud_name: + raise ValueError("Must supply cloud_name") resource_type = options.get("resource_type", "image") - return "/".join([cloudinary_prefix, "v1_1", cloud_name, resource_type, action]) + + return 
encode_unicode_url("/".join([cloudinary_prefix, "v1_1", cloud_name, resource_type, action])) -# Based on ruby's CGI::unescape. In addition does not escape / : -def smart_escape(source,unsafe = r"([^a-zA-Z0-9_.\-\/:]+)"): +def cloudinary_scaled_url(source, width, transformation, options): + """ + Generates a cloudinary url scaled to specified width. + + :param source: The resource + :param width: Width in pixels of the srcset item + :param transformation: Custom transformation that overrides transformations provided in options + :param options: A dict with additional options + + :return: Resulting URL of the item + """ + + # preserve options from being destructed + options = copy.deepcopy(options) + + if transformation: + if isinstance(transformation, string_types): + transformation = {"raw_transformation": transformation} + + # Remove all transformation related options + options = dict((o, options[o]) for o in __URL_KEYS if o in options) + options.update(transformation) + + scale_transformation = {"crop": "scale", "width": width} + + url_options = options + patch_fetch_format(url_options) + url_options = chain_transformations(url_options, scale_transformation) + + return cloudinary_url(source, **url_options)[0] + + +def smart_escape(source, unsafe=r"([^a-zA-Z0-9_.\-\/:]+)"): + """ + Based on ruby's CGI::unescape. 
In addition does not escape / : + + :param source: Source string to escape + :param unsafe: Unsafe characters + + :return: Escaped string + """ def pack(m): - return to_bytes('%' + "%".join(["%02X" % x for x in struct.unpack('B' * len(m.group(1)), m.group(1))]).upper()) + return to_bytes('%' + "%".join( + ["%02X" % x for x in struct.unpack('B' * len(m.group(1)), m.group(1))] + ).upper()) + return to_string(re.sub(to_bytes(unsafe), pack, to_bytes(source))) def random_public_id(): - return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(16)) + return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) + for _ in range(16)) def signed_preloaded_image(result): @@ -584,7 +820,8 @@ def download_archive_url(**options): params = options.copy() params.update(mode="download") cloudinary_params = sign_request(archive_params(**params), options) - return cloudinary_api_url("generate_archive", **options) + "?" + urlencode(bracketize_seq(cloudinary_params), True) + return cloudinary_api_url("generate_archive", **options) + "?" 
+ \ + urlencode(bracketize_seq(cloudinary_params), True) def download_zip_url(**options): @@ -592,10 +829,12 @@ def download_zip_url(**options): new_options.update(target_format="zip") return download_archive_url(**new_options) + def generate_auth_token(**options): token_options = merge(cloudinary.config().auth_token, options) return auth_token.generate(**token_options) + def archive_params(**options): if options.get("timestamp") is None: timestamp = now() @@ -613,6 +852,8 @@ def archive_params(**options): "phash": options.get("phash"), "prefixes": options.get("prefixes") and build_array(options.get("prefixes")), "public_ids": options.get("public_ids") and build_array(options.get("public_ids")), + "fully_qualified_public_ids": options.get("fully_qualified_public_ids") and build_array( + options.get("fully_qualified_public_ids")), "skip_transformation_name": options.get("skip_transformation_name"), "tags": options.get("tags") and build_array(options.get("tags")), "target_format": options.get("target_format"), @@ -629,15 +870,32 @@ def archive_params(**options): def build_eager(transformations): if transformations is None: return None - eager = [] - for tr in build_array(transformations): - if isinstance(tr, string_types): - single_eager = tr - else: - ext = tr.get("format") - single_eager = "/".join([x for x in [generate_transformation_string(**tr)[0], ext] if x]) - eager.append(single_eager) - return "|".join(eager) + + return "|".join([build_single_eager(et) for et in build_array(transformations)]) + + +def build_single_eager(options): + """ + Builds a single eager transformation which consists of transformation and (optionally) format joined by "/" + + :param options: Options containing transformation parameters and (optionally) a "format" key + format can be a string value (jpg, gif, etc) or can be set to "" (empty string). 
+ The latter leads to transformation ending with "/", which means "No extension, use original format" + If format is not provided or set to None, only transformation is used (without the trailing "/") + + :return: Resulting eager transformation string + """ + if isinstance(options, string_types): + return options + + trans_str = generate_transformation_string(**options)[0] + + if not trans_str: + return "" + + file_format = options.get("format") + + return trans_str + ("/" + file_format if file_format is not None else "") def build_custom_headers(headers): @@ -653,49 +911,30 @@ def build_custom_headers(headers): def build_upload_params(**options): - params = {"timestamp": now(), - "transformation": generate_transformation_string(**options)[0], - "public_id": options.get("public_id"), - "callback": options.get("callback"), - "format": options.get("format"), - "type": options.get("type"), - "backup": options.get("backup"), - "faces": options.get("faces"), - "image_metadata": options.get("image_metadata"), - "exif": options.get("exif"), - "colors": options.get("colors"), - "headers": build_custom_headers(options.get("headers")), - "eager": build_eager(options.get("eager")), - "use_filename": options.get("use_filename"), - "unique_filename": options.get("unique_filename"), - "discard_original_filename": options.get("discard_original_filename"), - "invalidate": options.get("invalidate"), - "notification_url": options.get("notification_url"), - "eager_notification_url": options.get("eager_notification_url"), - "eager_async": options.get("eager_async"), - "proxy": options.get("proxy"), - "folder": options.get("folder"), - "overwrite": options.get("overwrite"), - "tags": options.get("tags") and ",".join(build_array(options["tags"])), - "allowed_formats": options.get("allowed_formats") and ",".join(build_array(options["allowed_formats"])), - "face_coordinates": encode_double_array(options.get("face_coordinates")), - "custom_coordinates": 
encode_double_array(options.get("custom_coordinates")), - "context": encode_context(options.get("context")), - "moderation": options.get("moderation"), - "raw_convert": options.get("raw_convert"), - "quality_override": options.get("quality_override"), - "ocr": options.get("ocr"), - "categorization": options.get("categorization"), - "detection": options.get("detection"), - "similarity_search": options.get("similarity_search"), - "background_removal": options.get("background_removal"), - "upload_preset": options.get("upload_preset"), - "phash": options.get("phash"), - "return_delete_token": options.get("return_delete_token"), - "auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")), - "responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")), - "async": options.get("async"), - "access_control": options.get("access_control") and json_encode(build_list_of_dicts(options.get("access_control")))} + params = {param_name: options.get(param_name) for param_name in __SIMPLE_UPLOAD_PARAMS} + + serialized_params = { + "timestamp": now(), + "metadata": encode_context(options.get("metadata")), + "transformation": generate_transformation_string(**options)[0], + "headers": build_custom_headers(options.get("headers")), + "eager": build_eager(options.get("eager")), + "tags": options.get("tags") and ",".join(build_array(options["tags"])), + "allowed_formats": options.get("allowed_formats") and ",".join(build_array(options["allowed_formats"])), + "face_coordinates": encode_double_array(options.get("face_coordinates")), + "custom_coordinates": encode_double_array(options.get("custom_coordinates")), + "context": encode_context(options.get("context")), + "auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")), + "responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")), + "access_control": options.get("access_control") and json_encode( + 
build_list_of_dicts(options.get("access_control"))) + } + + # make sure that we are in-sync with __SERIALIZED_UPLOAD_PARAMS which are in use by other methods + serialized_params = {param_name: serialized_params[param_name] for param_name in __SERIALIZED_UPLOAD_PARAMS} + + params.update(serialized_params) + return params @@ -716,6 +955,14 @@ def __process_text_options(layer, layer_parameter): if line_spacing is not None: keywords.append("line_spacing_" + str(line_spacing)) + font_antialiasing = layer.get("font_antialiasing") + if font_antialiasing is not None: + keywords.append("antialias_" + str(font_antialiasing)) + + font_hinting = layer.get("font_hinting") + if font_hinting is not None: + keywords.append("hinting_" + str(font_hinting)) + if font_size is None and font_family is None and len(keywords) == 0: return None @@ -778,12 +1025,12 @@ def process_layer(layer, layer_parameter): if text is not None: var_pattern = VAR_NAME_RE - match = re.findall(var_pattern,text) + match = re.findall(var_pattern, text) - parts= filter(lambda p: p is not None, re.split(var_pattern,text)) + parts = filter(lambda p: p is not None, re.split(var_pattern, text)) encoded_text = [] for part in parts: - if re.match(var_pattern,part): + if re.match(var_pattern, part): encoded_text.append(part) else: encoded_text.append(smart_escape(smart_escape(part, r"([,/])"))) @@ -801,6 +1048,7 @@ def process_layer(layer, layer_parameter): return ':'.join(components) + IF_OPERATORS = { "=": 'eq', "!=": 'ne', @@ -813,7 +1061,8 @@ IF_OPERATORS = { "*": 'mul', "/": 'div', "+": 'add', - "-": 'sub' + "-": 'sub', + "^": 'pow' } PREDEFINED_VARS = { @@ -828,17 +1077,69 @@ PREDEFINED_VARS = { "page_x": "px", "page_y": "py", "tags": "tags", - "width": "w" + "width": "w", + "duration": "du", + "initial_duration": "idu", } -replaceRE = "((\\|\\||>=|<=|&&|!=|>|=|<|/|-|\\+|\\*)(?=[ _])|" + '|'.join(PREDEFINED_VARS.keys())+ ")" +replaceRE = "((\\|\\||>=|<=|&&|!=|>|=|<|/|-|\\+|\\*|\^)(?=[ _])|(?