mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-07-30 11:38:36 -07:00
Add cloudinary v1.11.0
This commit is contained in:
parent
5710bcb43c
commit
136260a822
27 changed files with 10855 additions and 0 deletions
302
lib/cloudinary/__init__.py
Normal file
302
lib/cloudinary/__init__.py
Normal file
|
@ -0,0 +1,302 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("Cloudinary")
|
||||
ch = logging.StreamHandler()
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from six import python_2_unicode_compatible
|
||||
|
||||
from cloudinary import utils
|
||||
from cloudinary.compat import urlparse, parse_qs
|
||||
from cloudinary.search import Search
|
||||
|
||||
CF_SHARED_CDN = "d3jpl91pxevbkh.cloudfront.net"
|
||||
OLD_AKAMAI_SHARED_CDN = "cloudinary-a.akamaihd.net"
|
||||
AKAMAI_SHARED_CDN = "res.cloudinary.com"
|
||||
SHARED_CDN = AKAMAI_SHARED_CDN
|
||||
CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"
|
||||
|
||||
VERSION = "1.11.0"
|
||||
USER_AGENT = "CloudinaryPython/" + VERSION
|
||||
""" :const: USER_AGENT """
|
||||
|
||||
USER_PLATFORM = ""
|
||||
"""
|
||||
Additional information to be passed with the USER_AGENT, e.g. "CloudinaryMagento/1.0.1".
|
||||
This value is set in platform-specific implementations that use cloudinary_php.
|
||||
|
||||
The format of the value should be <ProductName>/Version[ (comment)].
|
||||
@see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
|
||||
|
||||
**Do not set this value in application code!**
|
||||
"""
|
||||
|
||||
|
||||
def get_user_agent():
    """Return the `USER_AGENT` string sent to the Cloudinary servers.

    When `USER_PLATFORM` is non-empty it is prepended, separated by a space.

    :returns: the user agent
    :rtype: str
    """
    if USER_PLATFORM == "":
        return USER_AGENT
    return USER_PLATFORM + " " + USER_AGENT
|
||||
|
||||
|
||||
def import_django_settings():
    """Fetch the ``CLOUDINARY`` dict from Django settings, if available.

    :returns: the ``CLOUDINARY`` setting, or None when Django is not
        installed, not configured, or the setting is absent.
    """
    try:
        import django.conf
        from django.core.exceptions import ImproperlyConfigured
        try:
            # dir() check avoids raising AttributeError for a missing setting.
            if 'CLOUDINARY' in dir(django.conf.settings):
                return django.conf.settings.CLOUDINARY
            return None
        except ImproperlyConfigured:
            return None
    except ImportError:
        return None
|
||||
|
||||
|
||||
class Config(object):
    """Holds the active Cloudinary account settings.

    On construction, settings are loaded from (in order of precedence):
    Django's ``CLOUDINARY`` setting, the ``CLOUDINARY_*`` environment
    variables, or a ``CLOUDINARY_URL`` connection string. Unknown
    attributes read as ``None``.
    """

    def __init__(self):
        django_settings = import_django_settings()
        if django_settings:
            self.update(**django_settings)
        elif os.environ.get("CLOUDINARY_CLOUD_NAME"):
            env = os.environ
            self.update(
                cloud_name=env.get("CLOUDINARY_CLOUD_NAME"),
                api_key=env.get("CLOUDINARY_API_KEY"),
                api_secret=env.get("CLOUDINARY_API_SECRET"),
                secure_distribution=env.get("CLOUDINARY_SECURE_DISTRIBUTION"),
                private_cdn=env.get("CLOUDINARY_PRIVATE_CDN") == 'true'
            )
        elif os.environ.get("CLOUDINARY_URL"):
            self._parse_cloudinary_url(os.environ.get("CLOUDINARY_URL"))

    def _parse_cloudinary_url(self, cloudinary_url):
        """Populate settings from a ``cloudinary://key:secret@cloud/dist?...`` URL."""
        # Swap the scheme so urlparse yields username/password/hostname.
        uri = urlparse(cloudinary_url.replace("cloudinary://", "http://"))
        for k, v in parse_qs(uri.query).items():
            if self._is_nested_key(k):
                self._put_nested_key(k, v)
            else:
                self.__dict__[k] = v[0]
        self.update(
            cloud_name=uri.hostname,
            api_key=uri.username,
            api_secret=uri.password,
            private_cdn=uri.path != ''
        )
        if uri.path != '':
            self.update(secure_distribution=uri.path[1:])

    def __getattr__(self, i):
        # Only invoked for attributes not found normally; unknown settings
        # resolve to None instead of raising AttributeError.
        if i in self.__dict__:
            return self.__dict__[i]
        return None

    def update(self, **keywords):
        """Merge the given keyword arguments into the configuration."""
        self.__dict__.update(keywords)

    def _is_nested_key(self, key):
        # Query keys of the form name[sub] denote nested settings.
        return re.match(r'\w+\[\w+\]', key)

    def _put_nested_key(self, key, value):
        """Store a query key like ``a[b][c]=v`` as nested dicts on this config."""
        parts = [part for part in re.split(r'[\[\]]+', key) if part]
        leaf = parts.pop()
        target = self.__dict__
        for part in parts:
            target = target.setdefault(part, dict())
        if isinstance(value, list):
            value = value[0]
        target[leaf] = value
|
||||
|
||||
# Module-level singleton holding the active configuration.
_config = Config()


def config(**keywords):
    """Update the global configuration with *keywords* and return it."""
    global _config
    _config.update(**keywords)
    return _config


def reset_config():
    """Discard all settings and rebuild the configuration from the environment."""
    global _config
    _config = Config()
|
||||
|
||||
|
||||
@python_2_unicode_compatible
class CloudinaryResource(object):
    """Represents a single asset stored on Cloudinary and builds URLs/HTML tags for it.

    Explicit constructor arguments win over values found in *metadata*;
    ``type`` defaults to "upload" and ``resource_type`` falls back to
    *default_resource_type* (set by the CloudinaryImage/CloudinaryVideo
    subclasses).
    """
    def __init__(self, public_id=None, format=None, version=None,
                 signature=None, url_options=None, metadata=None, type=None, resource_type=None,
                 default_resource_type=None):
        self.metadata = metadata
        metadata = metadata or {}
        self.public_id = public_id or metadata.get('public_id')
        self.format = format or metadata.get('format')
        self.version = version or metadata.get('version')
        self.signature = signature or metadata.get('signature')
        self.type = type or metadata.get('type') or "upload"
        self.resource_type = resource_type or metadata.get('resource_type') or default_resource_type
        self.url_options = url_options or {}

    def __str__(self):
        return self.public_id

    def __len__(self):
        return len(self.public_id) if self.public_id is not None else 0

    def validate(self):
        """Return True when the stored signature matches the expected one."""
        return self.signature == self.get_expected_signature()

    def get_prep_value(self):
        """Serialize the resource as "resource_type/type/[vVERSION/]public_id[.format]".

        Returns None when any mandatory component is missing.
        """
        if None in [self.public_id,
                    self.type,
                    self.resource_type]:
            return None
        prep = ''
        prep = prep + self.resource_type + '/' + self.type + '/'
        if self.version: prep = prep + 'v' + str(self.version) + '/'
        prep = prep + self.public_id
        if self.format: prep = prep + '.' + self.format
        return prep

    def get_presigned(self):
        """Return the serialized value with its expected signature appended after '#'."""
        return self.get_prep_value() + '#' + self.get_expected_signature()

    def get_expected_signature(self):
        """Sign public_id+version with the configured api_secret."""
        return utils.api_sign_request({"public_id": self.public_id, "version": self.version}, config().api_secret)

    @property
    def url(self):
        # Delivery URL built with the url_options captured at construction.
        return self.build_url(**self.url_options)

    def __build_url(self, **options):
        # Merge this resource's attributes with per-call options (options win)
        # and delegate to utils.cloudinary_url, returning (url, remaining attrs).
        combined_options = dict(format=self.format, version=self.version, type=self.type,
                                resource_type=self.resource_type or "image")
        combined_options.update(options)
        public_id = combined_options.get('public_id') or self.public_id
        return utils.cloudinary_url(public_id, **combined_options)

    def build_url(self, **options):
        """Return only the delivery URL for this resource."""
        return self.__build_url(**options)[0]

    def default_poster_options(self, options):
        # Video posters are delivered as jpg unless the caller overrides format.
        options["format"] = options.get("format", "jpg")

    def default_source_types(self):
        """Source formats offered by default in a <video> tag."""
        return ['webm', 'mp4', 'ogv']

    def image(self, **options):
        """Return an HTML <img> tag for this resource.

        Supports responsive/hidpi delivery: when requested (and client hints
        are off) the real URL moves to data-src, a cld-* class is added and
        src becomes the responsive placeholder (or a blank GIF).
        """
        if options.get("resource_type", self.resource_type) == "video":
            self.default_poster_options(options)
        src, attrs = self.__build_url(**options)
        client_hints = attrs.pop("client_hints", config().client_hints)
        responsive = attrs.pop("responsive", False)
        hidpi = attrs.pop("hidpi", False)
        if (responsive or hidpi) and not client_hints:
            attrs["data-src"] = src
            classes = "cld-responsive" if responsive else "cld-hidpi"
            if "class" in attrs: classes += " " + attrs["class"]
            attrs["class"] = classes
            src = attrs.pop("responsive_placeholder", config().responsive_placeholder)
            if src == "blank": src = CL_BLANK

        if src: attrs["src"] = src

        return u"<img {0}/>".format(utils.html_attrs(attrs))

    def video_thumbnail(self, **options):
        """Return a thumbnail (poster) URL for this video resource."""
        self.default_poster_options(options)
        return self.build_url(**options)

    # Creates an HTML video tag for the provided +source+
    #
    # ==== Options
    # * <tt>source_types</tt> - Specify which source type the tag should include. defaults to webm, mp4 and ogv.
    # * <tt>source_transformation</tt> - specific transformations to use for a specific source type.
    # * <tt>poster</tt> - override default thumbnail:
    #   * url: provide an ad hoc url
    #   * options: with specific poster transformations and/or Cloudinary +:public_id+
    #
    # ==== Examples
    #   CloudinaryResource("mymovie.mp4").video()
    #   CloudinaryResource("mymovie.mp4").video(source_types = 'webm')
    #   CloudinaryResource("mymovie.ogv").video(poster = "myspecialplaceholder.jpg")
    #   CloudinaryResource("mymovie.webm").video(source_types = ['webm', 'mp4'], poster = {'effect': 'sepia'})
    def video(self, **options):
        """Return an HTML <video> tag for this resource (see comment above for options)."""
        public_id = options.get('public_id', self.public_id)
        # Strip a known video extension from the public id; the extension is
        # re-added per source type below.
        source = re.sub("\.({0})$".format("|".join(self.default_source_types())), '', public_id)

        source_types = options.pop('source_types', [])
        source_transformation = options.pop('source_transformation', {})
        fallback = options.pop('fallback_content', '')
        options['resource_type'] = options.pop('resource_type', self.resource_type or 'video')

        if not source_types: source_types = self.default_source_types()
        video_options = options.copy()

        # Resolve the poster attribute: explicit url string, options dict
        # (with or without its own public_id), or a derived thumbnail.
        if 'poster' in video_options:
            poster_options = video_options['poster']
            if isinstance(poster_options, dict):
                if 'public_id' in poster_options:
                    video_options['poster'] = utils.cloudinary_url(poster_options['public_id'], **poster_options)[0]
                else:
                    video_options['poster'] = self.video_thumbnail(public_id=source, **poster_options)
        else:
            video_options['poster'] = self.video_thumbnail(public_id=source, **options)

        if not video_options['poster']: del video_options['poster']

        # A single source type goes on the tag's src attribute; multiple
        # types emit one nested <source> element each.
        nested_source_types = isinstance(source_types, list) and len(source_types) > 1
        if not nested_source_types:
            source = source + '.' + utils.build_array(source_types)[0]

        video_url = utils.cloudinary_url(source, **video_options)
        video_options = video_url[1]
        if not nested_source_types:
            video_options['src'] = video_url[0]
        if 'html_width' in video_options: video_options['width'] = video_options.pop('html_width')
        if 'html_height' in video_options: video_options['height'] = video_options.pop('html_height')

        sources = ""
        if nested_source_types:
            for source_type in source_types:
                transformation = options.copy()
                transformation.update(source_transformation.get(source_type, {}))
                src = utils.cloudinary_url(source, format=source_type, **transformation)[0]
                # 'ogv' files are served with the 'video/ogg' MIME type.
                video_type = "ogg" if source_type == 'ogv' else source_type
                mime_type = "video/" + video_type
                sources += "<source {attributes}>".format(attributes=utils.html_attrs({'src': src, 'type': mime_type}))

        html = "<video {attributes}>{sources}{fallback}</video>".format(
            attributes=utils.html_attrs(video_options), sources=sources, fallback=fallback)
        return html
|
||||
|
||||
|
||||
class CloudinaryImage(CloudinaryResource):
    """CloudinaryResource whose resource_type defaults to "image"."""

    def __init__(self, public_id=None, **kwargs):
        super(CloudinaryImage, self).__init__(public_id=public_id, default_resource_type="image", **kwargs)
|
||||
|
||||
|
||||
class CloudinaryVideo(CloudinaryResource):
    """CloudinaryResource whose resource_type defaults to "video"."""

    def __init__(self, public_id=None, **kwargs):
        super(CloudinaryVideo, self).__init__(public_id=public_id, default_resource_type="video", **kwargs)
|
448
lib/cloudinary/api.py
Normal file
448
lib/cloudinary/api.py
Normal file
|
@ -0,0 +1,448 @@
|
|||
# Copyright Cloudinary
|
||||
|
||||
import email.utils
|
||||
import json
|
||||
import socket
|
||||
|
||||
import cloudinary
|
||||
from six import string_types
|
||||
|
||||
import urllib3
|
||||
import certifi
|
||||
|
||||
from cloudinary import utils
|
||||
from urllib3.exceptions import HTTPError
|
||||
|
||||
logger = cloudinary.logger
|
||||
|
||||
# Exception hierarchy for Admin API failures: one subclass per HTTP error family.
class Error(Exception):
    """Base class for all Cloudinary Admin API errors."""


class NotFound(Error):
    """404 - the requested resource does not exist."""


class NotAllowed(Error):
    """403 - the account may not perform this action."""


class AlreadyExists(Error):
    """409 - a resource with the same identifier already exists."""


class RateLimited(Error):
    """420 - the account hit its API rate limit."""


class BadRequest(Error):
    """400 - invalid request parameters."""


class GeneralError(Error):
    """500 or a transport-level failure."""


class AuthorizationRequired(Error):
    """401 - missing or invalid credentials."""


# Maps HTTP status codes returned by the API to the exception class to raise.
EXCEPTION_CODES = {
    400: BadRequest,
    401: AuthorizationRequired,
    403: NotAllowed,
    404: NotFound,
    409: AlreadyExists,
    420: RateLimited,
    500: GeneralError
}
|
||||
|
||||
|
||||
class Response(dict):
    """API result dict augmented with the response's rate-limit headers.

    Exposes rate_limit_allowed, rate_limit_reset_at (parsed date tuple) and
    rate_limit_remaining, taken from the x-featureratelimit-* headers.
    """

    def __init__(self, result, response, **kwargs):
        super(Response, self).__init__(**kwargs)
        self.update(result)
        headers = response.headers
        self.rate_limit_allowed = int(headers["x-featureratelimit-limit"])
        self.rate_limit_reset_at = email.utils.parsedate(headers["x-featureratelimit-reset"])
        self.rate_limit_remaining = int(headers["x-featureratelimit-remaining"])
|
||||
|
||||
_http = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def ping(**options):
    """Ping the Cloudinary API to verify connectivity and credentials."""
    return call_api("get", ["ping"], {}, **options)
|
||||
|
||||
|
||||
def usage(**options):
    """Report the account's current usage and quota figures."""
    return call_api("get", ["usage"], {}, **options)
|
||||
|
||||
|
||||
def resource_types(**options):
    """List the resource types present in the account."""
    return call_api("get", ["resources"], {}, **options)
|
||||
|
||||
|
||||
def resources(**options):
    """List resources, optionally restricted to a delivery type."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", None)
    uri = ["resources", resource_type]
    if delivery_type:
        uri.append(delivery_type)
    params = only(options, "next_cursor", "max_results", "prefix", "tags",
                  "context", "moderations", "direction", "start_at")
    return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_tag(tag, **options):
    """List resources labeled with *tag*."""
    resource_type = options.pop("resource_type", "image")
    params = only(options, "next_cursor", "max_results", "tags",
                  "context", "moderations", "direction")
    return call_api("get", ["resources", resource_type, "tags", tag], params, **options)
|
||||
|
||||
|
||||
def resources_by_moderation(kind, status, **options):
    """List resources in a given moderation *kind* and *status* queue."""
    resource_type = options.pop("resource_type", "image")
    params = only(options, "next_cursor", "max_results", "tags",
                  "context", "moderations", "direction")
    return call_api("get", ["resources", resource_type, "moderations", kind, status],
                    params, **options)
|
||||
|
||||
|
||||
def resources_by_ids(public_ids, **options):
    """Retrieve details for an explicit list of public IDs."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    params = only(options, "tags", "moderations", "context")
    params["public_ids"] = public_ids
    return call_api("get", ["resources", resource_type, delivery_type], params, **options)
|
||||
|
||||
|
||||
def resource(public_id, **options):
    """Get detailed information about a single resource."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    params = only(options, "exif", "faces", "colors", "image_metadata", "pages",
                  "phash", "coordinates", "max_results")
    return call_api("get", ["resources", resource_type, delivery_type, public_id],
                    params, **options)
|
||||
|
||||
|
||||
def update(public_id, **options):
    """Update attributes (moderation, tags, coordinates, context, ...) of one resource."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    uri = ["resources", resource_type, delivery_type, public_id]
    params = only(options, "moderation_status", "raw_convert",
                  "quality_override", "ocr",
                  "categorization", "detection", "similarity_search",
                  "background_removal", "notification_url")
    # Options that need special encoding before being sent to the API.
    encoders = {
        "tags": lambda v: ",".join(utils.build_array(v)),
        "face_coordinates": utils.encode_double_array,
        "custom_coordinates": utils.encode_double_array,
        "context": utils.encode_context,
        "auto_tagging": str,
        "access_control": lambda v: utils.json_encode(utils.build_list_of_dicts(v)),
    }
    for key, encode in encoders.items():
        if key in options:
            params[key] = encode(options[key])

    return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources(public_ids, **options):
    """Delete the listed public IDs."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    params = __delete_resource_params(options, public_ids=public_ids)
    return call_api("delete", ["resources", resource_type, delivery_type], params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_prefix(prefix, **options):
    """Delete every resource whose public ID starts with *prefix*."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    params = __delete_resource_params(options, prefix=prefix)
    return call_api("delete", ["resources", resource_type, delivery_type], params, **options)
|
||||
|
||||
|
||||
def delete_all_resources(**options):
    """Delete ALL resources of the given resource/delivery type."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    params = __delete_resource_params(options, all=True)
    return call_api("delete", ["resources", resource_type, delivery_type], params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_tag(tag, **options):
    """Delete every resource labeled with *tag*."""
    resource_type = options.pop("resource_type", "image")
    params = __delete_resource_params(options)
    return call_api("delete", ["resources", resource_type, "tags", tag], params, **options)
|
||||
|
||||
|
||||
def delete_derived_resources(derived_resource_ids, **options):
    """Delete derived resources identified by their derived-resource IDs."""
    return call_api("delete", ["derived_resources"],
                    {"derived_resource_ids": derived_resource_ids}, **options)
|
||||
|
||||
|
||||
def delete_derived_by_transformation(public_ids, transformations,
                                     resource_type='image', type='upload', invalidate=None,
                                     **options):
    """
    Delete derived resources of public ids, identified by transformations

    :param public_ids: the base resources
    :type public_ids: list of str
    :param transformations: the transformation of derived resources, optionally including the format
    :type transformations: list of (dict or str)
    :param type: The upload type
    :type type: str
    :param resource_type: The type of the resource: defaults to "image"
    :type resource_type: str
    :param invalidate: (optional) True to invalidate the resources after deletion
    :type invalidate: bool
    :return: a list of the public ids for which derived resources were deleted
    :rtype: dict
    """
    # Accept a single public id for convenience.
    if not isinstance(public_ids, list):
        public_ids = [public_ids]
    params = {
        "public_ids": public_ids,
        "transformations": utils.build_eager(transformations),
        # Only derived resources are removed; the originals are kept.
        "keep_original": True,
    }
    if invalidate is not None:
        params['invalidate'] = invalidate
    return call_api("delete", ["resources", resource_type, type], params, **options)
|
||||
|
||||
|
||||
def tags(**options):
    """List tags for a resource type, optionally filtered by prefix."""
    resource_type = options.pop("resource_type", "image")
    return call_api("get", ["tags", resource_type],
                    only(options, "next_cursor", "max_results", "prefix"), **options)
|
||||
|
||||
|
||||
def transformations(**options):
    """List all transformations defined in the account."""
    return call_api("get", ["transformations"],
                    only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def transformation(transformation, **options):
    """Get details of a single transformation (by name or definition)."""
    uri = ["transformations", transformation_string(transformation)]
    return call_api("get", uri, only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def delete_transformation(transformation, **options):
    """Delete a transformation (by name or definition)."""
    uri = ["transformations", transformation_string(transformation)]
    return call_api("delete", uri, {}, **options)
|
||||
|
||||
|
||||
# updates - currently only supported update is the "allowed_for_strict" boolean flag and unsafe_update
def update_transformation(transformation, **options):
    """Update a transformation's allowed_for_strict flag and/or unsafe_update definition."""
    uri = ["transformations", transformation_string(transformation)]
    updates = only(options, "allowed_for_strict")
    if "unsafe_update" in options:
        updates["unsafe_update"] = transformation_string(options.get("unsafe_update"))
    if not updates:
        raise Exception("No updates given")

    return call_api("put", uri, updates, **options)
|
||||
|
||||
|
||||
def create_transformation(name, definition, **options):
    """Create a named transformation from *definition*."""
    return call_api("post", ["transformations", name],
                    {"transformation": transformation_string(definition)}, **options)
|
||||
|
||||
|
||||
def publish_by_ids(public_ids, **options):
    """Publish restricted resources selected by public ID."""
    resource_type = options.pop("resource_type", "image")
    params = only(options, "type", "overwrite", "invalidate")
    params["public_ids"] = public_ids
    return call_api("post", ["resources", resource_type, "publish_resources"], params, **options)
|
||||
|
||||
|
||||
def publish_by_prefix(prefix, **options):
    """Publish restricted resources whose public ID starts with *prefix*."""
    resource_type = options.pop("resource_type", "image")
    params = only(options, "type", "overwrite", "invalidate")
    params["prefix"] = prefix
    return call_api("post", ["resources", resource_type, "publish_resources"], params, **options)
|
||||
|
||||
|
||||
def publish_by_tag(tag, **options):
    """Publish restricted resources labeled with *tag*."""
    resource_type = options.pop("resource_type", "image")
    params = only(options, "type", "overwrite", "invalidate")
    params["tag"] = tag
    return call_api("post", ["resources", resource_type, "publish_resources"], params, **options)
|
||||
|
||||
|
||||
def upload_presets(**options):
    """List the account's upload presets."""
    return call_api("get", ["upload_presets"],
                    only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def upload_preset(name, **options):
    """Get the details of upload preset *name*."""
    return call_api("get", ["upload_presets", name], only(options, "max_results"), **options)
|
||||
|
||||
|
||||
def delete_upload_preset(name, **options):
    """Delete upload preset *name*."""
    return call_api("delete", ["upload_presets", name], {}, **options)
|
||||
|
||||
|
||||
def update_upload_preset(name, **options):
    """Update upload preset *name* from standard upload options."""
    params = utils.cleanup_params(utils.build_upload_params(**options))
    params.update(only(options, "unsigned", "disallow_public_id"))
    return call_api("put", ["upload_presets", name], params, **options)
|
||||
|
||||
|
||||
def create_upload_preset(**options):
    """Create a new upload preset from standard upload options."""
    params = utils.cleanup_params(utils.build_upload_params(**options))
    params.update(only(options, "unsigned", "disallow_public_id", "name"))
    return call_api("post", ["upload_presets"], params, **options)
|
||||
|
||||
|
||||
def root_folders(**options):
    """List the account's top-level folders."""
    return call_api("get", ["folders"], {}, **options)
|
||||
|
||||
|
||||
def subfolders(of_folder_path, **options):
    """List the direct subfolders of *of_folder_path*."""
    return call_api("get", ["folders", of_folder_path], {}, **options)
|
||||
|
||||
|
||||
def restore(public_ids, **options):
    """Restore deleted (backed-up) resources by public ID."""
    resource_type = options.pop("resource_type", "image")
    delivery_type = options.pop("type", "upload")
    return call_api("post", ["resources", resource_type, delivery_type, "restore"],
                    {"public_ids": public_ids}, **options)
|
||||
|
||||
|
||||
def upload_mappings(**options):
    """List the account's upload mappings."""
    return call_api("get", ["upload_mappings"],
                    only(options, "next_cursor", "max_results"), **options)
|
||||
|
||||
|
||||
def upload_mapping(name, **options):
    """Get the upload mapping for folder *name*."""
    return call_api("get", ["upload_mappings"], {"folder": name}, **options)
|
||||
|
||||
|
||||
def delete_upload_mapping(name, **options):
    """Delete the upload mapping for folder *name*."""
    return call_api("delete", ["upload_mappings"], {"folder": name}, **options)
|
||||
|
||||
|
||||
def update_upload_mapping(name, **options):
    """Update the URL template mapped to folder *name*."""
    params = {"folder": name}
    params.update(only(options, "template"))
    return call_api("put", ["upload_mappings"], params, **options)
|
||||
|
||||
|
||||
def create_upload_mapping(name, **options):
    """Create an upload mapping from folder *name* to a URL template."""
    params = {"folder": name}
    params.update(only(options, "template"))
    return call_api("post", ["upload_mappings"], params, **options)
|
||||
|
||||
|
||||
def list_streaming_profiles(**options):
    """List all adaptive-streaming profiles."""
    return call_api('GET', ["streaming_profiles"], {}, **options)
|
||||
|
||||
|
||||
def get_streaming_profile(name, **options):
    """Get the details of streaming profile *name*."""
    return call_api('GET', ["streaming_profiles", name], {}, **options)
|
||||
|
||||
|
||||
def delete_streaming_profile(name, **options):
    """Delete streaming profile *name*."""
    return call_api('DELETE', ["streaming_profiles", name], {}, **options)
|
||||
|
||||
|
||||
def create_streaming_profile(name, **options):
    """Create streaming profile *name* with the given representations."""
    params = __prepare_streaming_profile_params(**options)
    params["name"] = name
    return call_api('POST', ["streaming_profiles"], params, **options)
|
||||
|
||||
|
||||
def update_streaming_profile(name, **options):
    """Update the representations/display name of streaming profile *name*."""
    params = __prepare_streaming_profile_params(**options)
    return call_api('PUT', ["streaming_profiles", name], params, **options)
|
||||
|
||||
|
||||
def call_json_api(method, uri, jsonBody, **options):
    """Send *jsonBody* as a JSON request body (rather than form params)."""
    logger.debug(jsonBody)
    payload = json.dumps(jsonBody).encode('utf-8')
    return _call_api(method, uri, body=payload,
                     headers={'Content-Type': 'application/json'}, **options)
|
||||
|
||||
|
||||
def call_api(method, uri, params, **options):
    """Send *params* as form/query parameters to the Admin API."""
    return _call_api(method, uri, params=params, **options)
|
||||
|
||||
|
||||
def _call_api(method, uri, params=None, body=None, headers=None, **options):
    """Perform one authenticated Admin API HTTP request and return a Response.

    :param method: HTTP verb, case-insensitive ("get", "post", ...)
    :param uri: list of path segments appended after /v1_1/<cloud_name>
    :param params: optional dict of parameters; list values are expanded to
        ``key[0]``, ``key[1]``, ... entries and falsy values are dropped
    :param body: optional raw request body (used by call_json_api)
    :param headers: optional extra HTTP headers merged over the auth headers
    :param options: overrides for upload_prefix/cloud_name/api_key/api_secret
        plus an optional ``timeout``
    :returns: Response (dict subclass carrying rate-limit attributes)
    :raises Error: a subclass selected from EXCEPTION_CODES by HTTP status
    """
    prefix = options.pop("upload_prefix",
                         cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name)
    if not cloud_name:
        raise Exception("Must supply cloud_name")
    api_key = options.pop("api_key", cloudinary.config().api_key)
    if not api_key:
        raise Exception("Must supply api_key")
    api_secret = options.pop("api_secret", cloudinary.config().api_secret)
    # BUG FIX: the original re-tested cloud_name here, so a missing
    # api_secret was never detected.
    if not api_secret:
        raise Exception("Must supply api_secret")
    api_url = "/".join([prefix, "v1_1", cloud_name] + uri)

    processed_params = None
    if isinstance(params, dict):
        processed_params = {}
        for key, value in params.items():
            if isinstance(value, list):
                # Expand lists to key[i] entries, as expected by the API.
                value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)}
                processed_params.update(value_list)
            elif value:
                processed_params[key] = value

    # Add authentication
    req_headers = urllib3.make_headers(
        basic_auth="{0}:{1}".format(api_key, api_secret),
        user_agent=cloudinary.get_user_agent()
    )
    if headers is not None:
        req_headers.update(headers)
    kw = {}
    if 'timeout' in options:
        kw['timeout'] = options['timeout']
    if body is not None:
        kw['body'] = body
    try:
        response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw)
        body = response.data
    except HTTPError as e:
        # BUG FIX: the original passed the format string and the argument as
        # two separate exception args (never formatted) and read e.message,
        # which does not exist on Python 3 exceptions.
        raise GeneralError("Unexpected error {0}".format(e))
    except socket.error as e:
        raise GeneralError("Socket Error: %s" % (str(e)))

    try:
        result = json.loads(body.decode('utf-8'))
    except Exception as e:
        # Error is parsing json
        raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e))

    if "error" in result:
        # Fall back to the generic Exception for unmapped status codes.
        exception_class = EXCEPTION_CODES.get(response.status) or Exception
        raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"]))

    return Response(result, response)
|
||||
|
||||
|
||||
def only(source, *keys):
    """Return a new dict containing only the *keys* present in *source*."""
    picked = {}
    for key in keys:
        if key in source:
            picked[key] = source[key]
    return picked
|
||||
|
||||
|
||||
def transformation_string(transformation):
    """Return *transformation* as a URL fragment; dict definitions are serialized."""
    if isinstance(transformation, string_types):
        return transformation
    return cloudinary.utils.generate_transformation_string(**transformation)[0]
|
||||
|
||||
|
||||
def __prepare_streaming_profile_params(**options):
    """Build the params shared by create/update streaming-profile calls."""
    params = only(options, "display_name")
    if "representations" in options:
        representations = [{"transformation": transformation_string(trans)}
                           for trans in options["representations"]]
        params["representations"] = json.dumps(representations)
    return params
|
||||
|
||||
def __delete_resource_params(options, **params):
    """Combine the common deletion options with the call-specific *params*."""
    merged = dict(transformations=utils.build_eager(options.get('transformations')),
                  **only(options, "keep_original", "next_cursor", "invalidate"))
    merged.update(params)
    return merged
|
47
lib/cloudinary/auth_token.py
Normal file
47
lib/cloudinary/auth_token.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
import time
|
||||
from binascii import a2b_hex
|
||||
from cloudinary.compat import quote_plus
|
||||
|
||||
AUTH_TOKEN_NAME = "__cld_token__"
|
||||
|
||||
|
||||
|
||||
def generate(url=None, acl=None, start_time=None, duration=None, expiration=None, ip=None, key=None,
             token_name=AUTH_TOKEN_NAME):
    """Build a signed Cloudinary authentication token query component.

    Either *expiration* (unix timestamp) or *duration* (seconds, counted
    from *start_time* or from now) must be given; *url*, *acl* and *ip*
    optionally restrict where the token is valid. *key* is the account's
    hex-encoded auth key.
    """
    if expiration is None:
        if duration is None:
            raise Exception("Must provide either expiration or duration")
        start = start_time if start_time is not None else int(time.mktime(time.gmtime()))
        expiration = start + duration

    token_parts = []
    if ip is not None:
        token_parts.append("ip=" + ip)
    if start_time is not None:
        token_parts.append("st=%d" % start_time)
    token_parts.append("exp=%d" % expiration)
    if acl is not None:
        token_parts.append("acl=%s" % _escape_to_lower(acl))
    # The url restriction is signed but not embedded in the emitted token.
    to_sign = list(token_parts)
    if url is not None:
        to_sign.append("url=%s" % _escape_to_lower(url))
    auth = _digest("~".join(to_sign), key)
    token_parts.append("hmac=%s" % auth)
    return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": "~".join(token_parts)}
|
||||
|
||||
|
||||
def _digest(message, key):
|
||||
bin_key = a2b_hex(key)
|
||||
return hmac.new(bin_key, message.encode('utf-8'), hashlib.sha256).hexdigest()
|
||||
|
||||
|
||||
def _escape_to_lower(url):
|
||||
escaped_url = quote_plus(url)
|
||||
|
||||
def toLowercase(match):
|
||||
return match.group(0).lower()
|
||||
|
||||
escaped_url = re.sub(r'%..', toLowercase, escaped_url)
|
||||
return escaped_url
|
34
lib/cloudinary/compat.py
Normal file
34
lib/cloudinary/compat.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
# Copyright Cloudinary
# Python 2/3 compatibility shims: a single place the rest of the package can
# import urllib/httplib names and text/bytes helpers from.
import six.moves.urllib.parse
urlencode = six.moves.urllib.parse.urlencode
unquote = six.moves.urllib.parse.unquote
urlparse = six.moves.urllib.parse.urlparse
parse_qs = six.moves.urllib.parse.parse_qs
parse_qsl = six.moves.urllib.parse.parse_qsl
quote_plus = six.moves.urllib.parse.quote_plus
httplib = six.moves.http_client
from six import PY3, string_types, StringIO, BytesIO
urllib2 = six.moves.urllib.request
NotConnected = six.moves.http_client.NotConnected

if PY3:
    # Python 3: text (str) and binary (bytes) are distinct types — convert via UTF-8.
    to_bytes = lambda s: s.encode('utf8')
    to_bytearray = lambda s: bytearray(s, 'utf8')
    to_string = lambda b: b.decode('utf8')

else:
    # Python 2: str is already a byte string, so plain str() casts suffice.
    to_bytes = str
    to_bytearray = str
    to_string = str

try:
    # Python 2: prefer the lazy xrange.
    cldrange = xrange
except NameError:
    # Python 3: range is already lazy; wrap in iter() for a uniform iterator API.
    def cldrange(*args, **kwargs):
        return iter(range(*args, **kwargs))

try:
    # Python 2.6+ / 3: the next() builtin exists.
    advance_iterator = next
except NameError:
    # Very old Python 2: fall back to calling the .next() method directly.
    def advance_iterator(it):
        return it.next()
|
134
lib/cloudinary/forms.py
Normal file
134
lib/cloudinary/forms.py
Normal file
|
@ -0,0 +1,134 @@
|
|||
from django import forms
|
||||
from cloudinary import CloudinaryResource
|
||||
import cloudinary.uploader
|
||||
import cloudinary.utils
|
||||
import re
|
||||
import json
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
def cl_init_js_callbacks(form, request):
    """Enable the upload-completion callback on every Cloudinary JS file field of *form*."""
    js_fields = (f for f in form.fields.values() if isinstance(f, CloudinaryJsFileField))
    for js_field in js_fields:
        js_field.enable_callback(request)
|
||||
|
||||
|
||||
class CloudinaryInput(forms.TextInput):
    """File-input widget that uploads directly to Cloudinary.

    Signed upload parameters are carried in ``data-*`` attributes consumed by
    the Cloudinary jQuery upload plugin.
    """
    input_type = 'file'

    def render(self, name, value, attrs=None):
        # NOTE(review): Django >= 1.11 changed build_attrs() to take
        # (base_attrs, extra_attrs) — confirm the Django versions this targets.
        attrs = self.build_attrs(attrs)
        options = attrs.get('options', {})
        attrs["options"] = ''

        params = cloudinary.utils.build_upload_params(**options)
        # Unsigned (preset-based) uploads skip request signing entirely.
        if options.get("unsigned"):
            params = cloudinary.utils.cleanup_params(params)
        else:
            params = cloudinary.utils.sign_request(params, options)

        if 'resource_type' not in options: options['resource_type'] = 'auto'
        cloudinary_upload_url = cloudinary.utils.cloudinary_api_url("upload", **options)

        # data-* attributes read by the cloudinary-fileupload jQuery plugin.
        attrs["data-url"] = cloudinary_upload_url
        attrs["data-form-data"] = json.dumps(params)
        attrs["data-cloudinary-field"] = name
        chunk_size = options.get("chunk_size", None)
        if chunk_size: attrs["data-max-chunk-size"] = chunk_size
        attrs["class"] = " ".join(["cloudinary-fileupload", attrs.get("class", "")])

        # The visible input is named "file"; the actual value round-trips in a
        # hidden input so an already-uploaded resource survives form redisplay.
        widget = super(CloudinaryInput, self).render("file", None, attrs=attrs)
        if value:
            if isinstance(value, CloudinaryResource):
                value_string = value.get_presigned()
            else:
                value_string = value
            widget += forms.HiddenInput().render(name, value_string)
        return widget
|
||||
|
||||
|
||||
class CloudinaryJsFileField(forms.Field):
    """Form field whose value is a Cloudinary resource uploaded client-side.

    The browser uploads straight to Cloudinary; the form receives a signed
    "resource_type/type/vVERSION/public_id.format#signature" string which is
    parsed and signature-verified server-side.
    """
    default_error_messages = {
        'required': _(u"No file selected!")
    }

    def __init__(self, attrs=None, options=None, autosave=True, *args, **kwargs):
        if attrs is None: attrs = {}
        if options is None: options = {}
        self.autosave = autosave
        # Copy to avoid mutating caller-owned dicts.
        attrs = attrs.copy()
        attrs["options"] = options.copy()

        field_options = {'widget': CloudinaryInput(attrs=attrs)}
        field_options.update(kwargs)
        super(CloudinaryJsFileField, self).__init__(*args, **field_options)

    def enable_callback(self, request):
        # Point the upload plugin at the static CORS callback page, resolved
        # to an absolute URL for the current request.
        from django.contrib.staticfiles.storage import staticfiles_storage
        self.widget.attrs["options"]["callback"] = request.build_absolute_uri(
            staticfiles_storage.url("html/cloudinary_cors.html"))

    def to_python(self, value):
        """Convert to CloudinaryResource"""
        if not value: return None
        # Expected shape: resource_type/type/vNNN/filename#signature
        m = re.search(r'^([^/]+)/([^/]+)/v(\d+)/([^#]+)#([^/]+)$', value)
        if not m:
            raise forms.ValidationError("Invalid format")
        resource_type = m.group(1)
        upload_type = m.group(2)
        version = m.group(3)
        filename = m.group(4)
        signature = m.group(5)
        # Split filename into public_id and format at the last dot.
        m = re.search(r'(.*)\.(.*)', filename)
        if not m:
            raise forms.ValidationError("Invalid file name")
        public_id = m.group(1)
        image_format = m.group(2)
        return CloudinaryResource(public_id,
                                  format=image_format,
                                  version=version,
                                  signature=signature,
                                  type=upload_type,
                                  resource_type=resource_type)

    def validate(self, value):
        """Validate the signature"""
        # Use the parent's handling of required fields, etc.
        super(CloudinaryJsFileField, self).validate(value)
        if not value: return
        # Reject values whose Cloudinary signature does not verify.
        if not value.validate():
            raise forms.ValidationError("Signature mismatch")
|
||||
|
||||
|
||||
class CloudinaryUnsignedJsFileField(CloudinaryJsFileField):
    """A CloudinaryJsFileField preconfigured for unsigned uploads via an upload preset."""

    def __init__(self, upload_preset, attrs=None, options=None, autosave=True, *args, **kwargs):
        # Work on a copy so the caller's options dict is never mutated.
        merged_options = dict(options or {})
        merged_options["unsigned"] = True
        merged_options["upload_preset"] = upload_preset
        super(CloudinaryUnsignedJsFileField, self).__init__(
            attrs if attrs is not None else {}, merged_options, autosave, *args, **kwargs)
|
||||
|
||||
|
||||
class CloudinaryFileField(forms.FileField):
    """Standard Django FileField that (optionally) uploads the cleaned file to
    Cloudinary during to_python, returning a CloudinaryResource."""
    my_default_error_messages = {
        'required': _(u"No file selected!")
    }
    default_error_messages = forms.FileField.default_error_messages.copy()
    default_error_messages.update(my_default_error_messages)

    def __init__(self, options=None, autosave=True, *args, **kwargs):
        self.autosave = autosave
        self.options = options or {}
        super(CloudinaryFileField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        """Clean the uploaded file; when autosave is on, upload it to
        Cloudinary and return the resulting CloudinaryResource."""
        cleaned = super(CloudinaryFileField, self).to_python(value)
        if not cleaned:
            return None
        if not self.autosave:
            return cleaned
        return cloudinary.uploader.upload_image(cleaned, **self.options)
|
121
lib/cloudinary/models.py
Normal file
121
lib/cloudinary/models.py
Normal file
|
@ -0,0 +1,121 @@
|
|||
import re
|
||||
|
||||
|
||||
from cloudinary import CloudinaryResource, forms, uploader
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.db import models
|
||||
|
||||
# Add introspection rules for South, if it's installed.
|
||||
try:
|
||||
from south.modelsinspector import add_introspection_rules
|
||||
add_introspection_rules([], ["^cloudinary.models.CloudinaryField"])
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
CLOUDINARY_FIELD_DB_RE = r'(?:(?P<resource_type>image|raw|video)/(?P<type>upload|private|authenticated)/)?(?:v(?P<version>\d+)/)?(?P<public_id>.*?)(\.(?P<format>[^.]+))?$'
|
||||
|
||||
|
||||
# Taken from six - https://pythonhosted.org/six/
|
||||
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass (six recipe).

    Returns a throwaway class that, the moment it is subclassed, replaces
    itself with a class built by the real metaclass ``meta`` using the
    intended ``bases`` — sidestepping py2/py3 metaclass syntax differences.
    """
    class _shim(meta):
        # Swap the dummy base out for a class produced by the real metaclass.
        def __new__(cls, name, this_bases, attrs):
            return meta(name, bases, attrs)

    return type.__new__(_shim, 'temporary_class', (), {})
|
||||
|
||||
|
||||
class CloudinaryField(models.Field):
    """Django model field that stores a Cloudinary resource as a CharField.

    The DB value is a string matching CLOUDINARY_FIELD_DB_RE; in Python it is
    exposed as a CloudinaryResource. Assigning an UploadedFile triggers an
    upload to Cloudinary on save (see pre_save).
    """
    description = "A resource stored in Cloudinary"

    def __init__(self, *args, **kwargs):
        options = {'max_length': 255}
        # Form class used by formfield(); overridable per-field.
        self.default_form_class = kwargs.pop("default_form_class", forms.CloudinaryFileField)
        options.update(kwargs)
        # Cloudinary delivery type and resource type defaults for this field.
        self.type = options.pop("type", "upload")
        self.resource_type = options.pop("resource_type", "image")
        # Optional model attribute names to populate with uploaded dimensions.
        self.width_field = options.pop("width_field", None)
        self.height_field = options.pop("height_field", None)
        super(CloudinaryField, self).__init__(*args, **options)

    def get_internal_type(self):
        # Stored in the database as a plain CharField.
        return 'CharField'

    def value_to_string(self, obj):
        # We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods.
        # It would be better to wrap it with try -> except AttributeError -> fallback to legacy.
        # Unfortunately, we can catch AttributeError exception from `value_from_object` function itself.
        # Parsing exception string is an overkill here, that's why we check for attribute existence

        if hasattr(self, 'value_from_object'):
            value = self.value_from_object(obj)
        else:  # fallback for legacy django versions
            value = self._get_val_from_obj(obj)

        return self.get_prep_value(value)

    def parse_cloudinary_resource(self, value):
        """Parse a DB string into a CloudinaryResource.

        NOTE(review): re.match can return None for a malformed value, which
        would raise AttributeError below — confirm DB values are trusted.
        """
        m = re.match(CLOUDINARY_FIELD_DB_RE, value)
        # Fall back to the field's configured defaults for omitted components.
        resource_type = m.group('resource_type') or self.resource_type
        upload_type = m.group('type') or self.type
        return CloudinaryResource(
            type=upload_type,
            resource_type=resource_type,
            version=m.group('version'),
            public_id=m.group('public_id'),
            format=m.group('format')
        )

    def from_db_value(self, value, expression, connection, context):
        # NOTE(review): the `context` argument was removed in Django 2.0 —
        # confirm the supported Django versions.
        if value is None:
            return value
        return self.parse_cloudinary_resource(value)

    def to_python(self, value):
        """Coerce assigned values: pass through resources, uploaded files and
        None; parse DB strings into CloudinaryResource."""
        if isinstance(value, CloudinaryResource):
            return value
        elif isinstance(value, UploadedFile):
            return value
        elif value is None:
            return value
        else:
            return self.parse_cloudinary_resource(value)

    def upload_options_with_filename(self, model_instance, filename):
        # Hook point: override to vary upload options by filename.
        return self.upload_options(model_instance)

    def upload_options(self, model_instance):
        # Hook point: override to supply extra Cloudinary upload options.
        return {}

    def pre_save(self, model_instance, add):
        value = super(CloudinaryField, self).pre_save(model_instance, add)
        if isinstance(value, UploadedFile):
            # A freshly uploaded file: push it to Cloudinary now and replace
            # the model attribute with the resulting resource.
            options = {"type": self.type, "resource_type": self.resource_type}
            options.update(self.upload_options_with_filename(model_instance, value.name))
            instance_value = uploader.upload_resource(value, **options)
            setattr(model_instance, self.attname, instance_value)
            # Mirror uploaded dimensions into sibling fields when configured.
            if self.width_field:
                setattr(model_instance, self.width_field, instance_value.metadata['width'])
            if self.height_field:
                setattr(model_instance, self.height_field, instance_value.metadata['height'])
            return self.get_prep_value(instance_value)
        else:
            return value

    def get_prep_value(self, value):
        """Convert a CloudinaryResource (or falsy value) to its DB string."""
        if not value:
            return self.get_default()
        if isinstance(value, CloudinaryResource):
            return value.get_prep_value()
        else:
            return value

    def formfield(self, **kwargs):
        # Propagate this field's type/resource_type into the form field options.
        options = {"type": self.type, "resource_type": self.resource_type}
        options.update(kwargs.pop('options', {}))
        defaults = {'form_class': self.default_form_class, 'options': options, 'autosave': False}
        defaults.update(kwargs)
        return super(CloudinaryField, self).formfield(**defaults)
|
34
lib/cloudinary/poster/__init__.py
Normal file
34
lib/cloudinary/poster/__init__.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
#
|
||||
# Copyright (c) 2011 Chris AtLee
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""poster module
|
||||
|
||||
Support for streaming HTTP uploads, and multipart/form-data encoding
|
||||
|
||||
```poster.version``` is a 3-tuple of integers representing the version number.
|
||||
New releases of poster will always have a version number that compares greater
|
||||
than an older version of poster.
|
||||
New in version 0.6."""
|
||||
|
||||
import cloudinary.poster.streaminghttp
|
||||
import cloudinary.poster.encode
|
||||
|
||||
version = (0, 8, 2) # Thanks JP!
|
447
lib/cloudinary/poster/encode.py
Normal file
447
lib/cloudinary/poster/encode.py
Normal file
|
@ -0,0 +1,447 @@
|
|||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""multipart/form-data encoding module
|
||||
|
||||
This module provides functions that facilitate encoding name/value pairs
|
||||
as multipart/form-data suitable for a HTTP POST or PUT request.
|
||||
|
||||
multipart/form-data is the standard way to upload files over HTTP"""
|
||||
|
||||
__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
|
||||
'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
|
||||
'multipart_encode']
|
||||
|
||||
# Boundary generation: prefer uuid; fall back to the py2-era sha module on
# ancient interpreters that lack uuid.
try:
    import uuid
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        return uuid.uuid4().hex
except ImportError:
    import random, sha
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        # Hash 160 random bits down to a hex digest.
        bits = random.getrandbits(160)
        return sha.new(str(bits)).hexdigest()
|
||||
|
||||
import re, os, mimetypes
|
||||
from cloudinary.compat import (PY3, string_types, to_bytes, to_string,
|
||||
to_bytearray, quote_plus, advance_iterator)
|
||||
try:
|
||||
from email.header import Header
|
||||
except ImportError:
|
||||
# Python 2.4
|
||||
from email.Header import Header
|
||||
|
||||
# Two implementations chosen at import time, one per Python major version.
if PY3:
    def encode_and_quote(data):
        """Return quote_plus of UTF-8-encoded ``data``; None passes through."""
        if data is None:
            return None
        return quote_plus(to_bytes(data))

else:
    def encode_and_quote(data):
        """If ``data`` is unicode, return quote_plus(data.encode("utf-8")) otherwise return quote_plus(data)"""
        if data is None:
            return None

        if isinstance(data, unicode):
            data = data.encode("utf-8")
        return quote_plus(data)
|
||||
|
||||
# Byte-string coercion helper, one implementation per Python major version.
if PY3:
    def _strify(s):
        """Return ``s`` as bytes: UTF-8-encode str, pass bytes through,
        str()-then-encode anything else; None passes through."""
        if s is None:
            return None
        elif isinstance(s, bytes):
            return s
        else:
            try:
                return to_bytes(s)
            except AttributeError:
                # Not a str (no .encode): stringify first, then encode.
                return to_bytes(str(s))
else:
    def _strify(s):
        """If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None"""
        if s is None:
            return None
        if isinstance(s, unicode):
            return s.encode("utf-8")
        return str(s)
|
||||
|
||||
class MultipartParam(object):
    """Represents a single parameter in a multipart/form-data request

    ``name`` is the name of this parameter.

    If ``value`` is set, it must be a string or unicode object to use as the
    data for this parameter.

    If ``filename`` is set, it is what to say that this parameter's filename
    is. Note that this does not have to be the actual filename of any local
    file.

    If ``filetype`` is set, it is used as the Content-Type for this parameter.
    If unset it defaults to "text/plain; charset=utf8"

    If ``filesize`` is set, it specifies the length of the file ``fileobj``

    If ``fileobj`` is set, it must be a file-like object that supports
    .read().

    Both ``value`` and ``fileobj`` must not be set, doing so will
    raise a ValueError assertion.

    If ``fileobj`` is set, and ``filesize`` is not specified, then
    the file's size will be determined first by stat'ing ``fileobj``'s
    file descriptor, and if that fails, by seeking to the end of the file,
    recording the current position as the size, and then by seeking back to the
    beginning of the file.

    ``cb`` is a callable which will be called from iter_encode with (self,
    current, total), representing the current parameter, current amount
    transferred, and the total size.
    """
    def __init__(self, name, value=None, filename=None, filetype=None,
            filesize=None, fileobj=None, cb=None):
        # MIME-encode the field name per RFC 2047 for non-ASCII safety.
        self.name = Header(name).encode()
        self.value = _strify(value)
        if filename is None:
            self.filename = None
        else:
            if PY3:
                # Replace non-ASCII characters with XML character references.
                byte_filename = filename.encode("ascii", "xmlcharrefreplace")
                self.filename = to_string(byte_filename)
                encoding = 'unicode_escape'
            else:
                if isinstance(filename, unicode):
                    # Encode with XML entities
                    self.filename = filename.encode("ascii", "xmlcharrefreplace")
                else:
                    self.filename = str(filename)
                encoding = 'string_escape'
            # Backslash-escape embedded quotes so the filename can be quoted
            # inside the Content-Disposition header.
            self.filename = self.filename.encode(encoding).replace(to_bytes('"'), to_bytes('\\"'))
        self.filetype = _strify(filetype)

        self.filesize = filesize
        self.fileobj = fileobj
        self.cb = cb

        if self.value is not None and self.fileobj is not None:
            raise ValueError("Only one of value or fileobj may be specified")

        if fileobj is not None and filesize is None:
            # Try and determine the file size
            try:
                self.filesize = os.fstat(fileobj.fileno()).st_size
            except (OSError, AttributeError, UnsupportedOperation):
                # No usable fd: fall back to seek-to-end / tell / rewind.
                try:
                    fileobj.seek(0, 2)
                    self.filesize = fileobj.tell()
                    fileobj.seek(0)
                except:
                    raise ValueError("Could not determine filesize")

    def __cmp__(self, other):
        # Python 2 only: compare field-by-field via the removed cmp() builtin.
        attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
        myattrs = [getattr(self, a) for a in attrs]
        oattrs = [getattr(other, a) for a in attrs]
        return cmp(myattrs, oattrs)

    def reset(self):
        # Rewind the underlying file so the parameter can be re-encoded
        # (used when a request is retried/redirected).
        if self.fileobj is not None:
            self.fileobj.seek(0)
        elif self.value is None:
            raise ValueError("Don't know how to reset this parameter")

    @classmethod
    def from_file(cls, paramname, filename):
        """Returns a new MultipartParam object constructed from the local
        file at ``filename``.

        ``filesize`` is determined by os.path.getsize(``filename``)

        ``filetype`` is determined by mimetypes.guess_type(``filename``)[0]

        ``filename`` is set to os.path.basename(``filename``)
        """

        return cls(paramname, filename=os.path.basename(filename),
                filetype=mimetypes.guess_type(filename)[0],
                filesize=os.path.getsize(filename),
                fileobj=open(filename, "rb"))

    @classmethod
    def from_params(cls, params):
        """Returns a list of MultipartParam objects from a sequence of
        name, value pairs, MultipartParam instances,
        or from a mapping of names to values

        The values may be strings or file objects, or MultipartParam objects.
        MultipartParam object names must match the given names in the
        name,value pairs or mapping, if applicable."""
        if hasattr(params, 'items'):
            params = params.items()

        retval = []
        for item in params:
            if isinstance(item, cls):
                retval.append(item)
                continue
            name, value = item
            if isinstance(value, cls):
                assert value.name == name
                retval.append(value)
                continue
            if hasattr(value, 'read'):
                # Looks like a file object
                filename = getattr(value, 'name', None)
                if filename is not None:
                    filetype = mimetypes.guess_type(filename)[0]
                else:
                    filetype = None

                retval.append(cls(name=name, filename=filename,
                    filetype=filetype, fileobj=value))
            else:
                retval.append(cls(name, value))
        return retval

    def encode_hdr(self, boundary):
        """Returns the header of the encoding of this parameter"""
        boundary = encode_and_quote(boundary)

        headers = ["--%s" % boundary]

        if self.filename:
            disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
                    to_string(self.filename))
        else:
            disposition = 'form-data; name="%s"' % self.name

        headers.append("Content-Disposition: %s" % disposition)

        if self.filetype:
            filetype = to_string(self.filetype)
        else:
            filetype = "text/plain; charset=utf-8"

        headers.append("Content-Type: %s" % filetype)

        # Two trailing appends yield the blank line separating headers from body.
        headers.append("")
        headers.append("")

        return "\r\n".join(headers)

    def encode(self, boundary):
        """Returns the string encoding of this parameter"""
        if self.value is None:
            value = self.fileobj.read()
        else:
            value = self.value

        # A body containing the boundary would corrupt the whole request.
        if re.search(to_bytes("^--%s$" % re.escape(boundary)), value, re.M):
            raise ValueError("boundary found in encoded string")

        return to_bytes(self.encode_hdr(boundary)) + value + b"\r\n"

    def iter_encode(self, boundary, blocksize=4096):
        """Yields the encoding of this parameter
        If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
        yielded."""
        total = self.get_size(boundary)
        current = 0
        if self.value is not None:
            # In-memory value: emit everything as one block.
            block = self.encode(boundary)
            current += len(block)
            yield block
            if self.cb:
                self.cb(self, current, total)
        else:
            # File-backed value: headers first, then the body in chunks.
            block = to_bytes(self.encode_hdr(boundary))
            current += len(block)
            yield block
            if self.cb:
                self.cb(self, current, total)
            # Keep a sliding tail of recent data so a boundary spanning two
            # read blocks is still detected.
            last_block = to_bytearray("")
            encoded_boundary = "--%s" % encode_and_quote(boundary)
            boundary_exp = re.compile(to_bytes("^%s$" % re.escape(encoded_boundary)),
                    re.M)
            while True:
                block = self.fileobj.read(blocksize)
                if not block:
                    # End of file: emit the trailing CRLF and finish.
                    current += 2
                    yield to_bytes("\r\n")
                    if self.cb:
                        self.cb(self, current, total)
                    break
                last_block += block
                if boundary_exp.search(last_block):
                    raise ValueError("boundary found in file data")
                last_block = last_block[-len(to_bytes(encoded_boundary))-2:]
                current += len(block)
                yield block
                if self.cb:
                    self.cb(self, current, total)

    def get_size(self, boundary):
        """Returns the size in bytes that this param will be when encoded
        with the given boundary."""
        if self.filesize is not None:
            valuesize = self.filesize
        else:
            valuesize = len(self.value)

        # header + trailing CRLF (2 bytes) + body
        return len(self.encode_hdr(boundary)) + 2 + valuesize
|
||||
|
||||
def encode_string(boundary, name, value):
    """Encode ``name``/``value`` as a complete multipart/form-data field.

    ``boundary`` is the boundary string used throughout a single request to
    separate variables.
    """
    param = MultipartParam(name, value)
    return param.encode(boundary)
|
||||
|
||||
def encode_file_header(boundary, paramname, filesize, filename=None,
        filetype=None):
    """Return the leading multipart/form-data data for a file field.

    ``boundary`` is the boundary string used throughout a single request to
    separate variables. ``paramname`` is the name of the variable in this
    request. ``filesize`` is the size of the file data. ``filename``, if
    specified, is the filename to report to the server; ``filetype``, if
    specified, is the MIME type of this file.

    The actual file data should be sent after this header has been sent.
    """
    header_param = MultipartParam(paramname, filesize=filesize,
                                  filename=filename, filetype=filetype)
    return header_param.encode_hdr(boundary)
|
||||
|
||||
def get_body_size(params, boundary):
    """Return the total byte size of the multipart/form-data encoding of ``params``."""
    total = 0
    for param in MultipartParam.from_params(params):
        total += param.get_size(boundary)
    # Account for the closing "--boundary--\r\n" terminator.
    return total + len(boundary) + 6
|
||||
|
||||
def get_headers(params, boundary):
    """Return a dict with the Content-Type and Content-Length headers
    for the multipart/form-data encoding of ``params``."""
    quoted_boundary = quote_plus(boundary)
    return {
        'Content-Type': "multipart/form-data; boundary=%s" % quoted_boundary,
        'Content-Length': str(get_body_size(params, quoted_boundary)),
    }
|
||||
|
||||
class multipart_yielder:
    """Iterator that yields the multipart/form-data encoding of a list of
    MultipartParam objects block by block, invoking ``cb`` with progress."""

    def __init__(self, params, boundary, cb):
        self.params = params
        self.boundary = boundary
        self.cb = cb

        # i: index of the next param; None once the terminator has been emitted.
        self.i = 0
        self.p = None
        self.param_iter = None
        self.current = 0
        self.total = get_body_size(params, boundary)

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 iterator protocol delegates to the py2-style next().
        return self.next()

    def next(self):
        """generator function to yield multipart/form-data representation
        of parameters"""
        # Drain the current parameter's encoder first.
        if self.param_iter is not None:
            try:
                block = advance_iterator(self.param_iter)
                self.current += len(block)
                if self.cb:
                    self.cb(self.p, self.current, self.total)
                return block
            except StopIteration:
                self.p = None
                self.param_iter = None

        if self.i is None:
            # Terminator already emitted: iteration is complete.
            raise StopIteration
        elif self.i >= len(self.params):
            # All params emitted: yield the closing "--boundary--" terminator
            # exactly once, marking completion with i = None.
            self.param_iter = None
            self.p = None
            self.i = None
            block = to_bytes("--%s--\r\n" % self.boundary)
            self.current += len(block)
            if self.cb:
                self.cb(self.p, self.current, self.total)
            return block

        # Start encoding the next parameter, then recurse to yield its first block.
        self.p = self.params[self.i]
        self.param_iter = self.p.iter_encode(self.boundary)
        self.i += 1
        return advance_iterator(self)

    def reset(self):
        # Rewind to re-send the whole body (e.g. after a redirect).
        self.i = 0
        self.current = 0
        for param in self.params:
            param.reset()
|
||||
|
||||
def multipart_encode(params, boundary=None, cb=None):
    """Encode ``params`` as multipart/form-data.

    ``params`` should be a sequence of (name, value) pairs or MultipartParam
    objects, or a mapping of names to values.
    Values are either strings parameter values, or file-like objects to use as
    the parameter value. The file-like objects must support .read() and either
    .fileno() or both .seek() and .tell().

    If ``boundary`` is set, then it is used as the MIME boundary. Otherwise
    a randomly generated boundary will be used. In either case, if the
    boundary string appears in the parameter values a ValueError will be
    raised.

    If ``cb`` is set, it should be a callback which will get called as blocks
    of data are encoded. It will be called with (param, current, total),
    indicating the current parameter being encoded, the current amount encoded,
    and the total amount to encode.

    Returns a tuple of `datagen`, `headers`, where `datagen` is a
    generator that will yield blocks of data that make up the encoded
    parameters, and `headers` is a dictionary with the associated
    Content-Type and Content-Length headers.

    Examples:

    >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> p = MultipartParam("key", "value2")
    >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
    >>> s = "".join(datagen)
    >>> assert "value2" in s and "value1" in s

    >>> datagen, headers = multipart_encode( {"key": "value1"} )
    >>> s = "".join(datagen)
    >>> assert "value2" not in s and "value1" in s

    """
    boundary = gen_boundary() if boundary is None else quote_plus(boundary)

    # Headers are computed from the raw params before normalization.
    headers = get_headers(params, boundary)
    encoded_params = MultipartParam.from_params(params)

    return multipart_yielder(encoded_params, boundary, cb), headers
|
201
lib/cloudinary/poster/streaminghttp.py
Normal file
201
lib/cloudinary/poster/streaminghttp.py
Normal file
|
@ -0,0 +1,201 @@
|
|||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""Streaming HTTP uploads module.
|
||||
|
||||
This module extends the standard httplib and urllib2 objects so that
|
||||
iterable objects can be used in the body of HTTP requests.
|
||||
|
||||
In most cases all one should have to do is call :func:`register_openers()`
|
||||
to register the new streaming http handlers which will take priority over
|
||||
the default handlers, and then you can use iterable objects in the body
|
||||
of HTTP requests.
|
||||
|
||||
**N.B.** You must specify a Content-Length header if using an iterable object
|
||||
since there is no way to determine in advance the total size that will be
|
||||
yielded, and there is no way to reset an iterator.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> from StringIO import StringIO
|
||||
>>> import urllib2, poster.streaminghttp
|
||||
|
||||
>>> opener = poster.streaminghttp.register_openers()
|
||||
|
||||
>>> s = "Test file data"
|
||||
>>> f = StringIO(s)
|
||||
|
||||
>>> req = urllib2.Request("http://localhost:5000", f,
|
||||
... {'Content-Length': str(len(s))})
|
||||
"""
|
||||
|
||||
import sys, socket
|
||||
from cloudinary.compat import httplib, urllib2, NotConnected
|
||||
|
||||
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
|
||||
'StreamingHTTPHandler', 'register_openers']
|
||||
|
||||
if hasattr(httplib, 'HTTPS'):
|
||||
__all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
|
||||
|
||||
class _StreamingHTTPMixin:
    """Mixin class for HTTP and HTTPS connections that implements a streaming
    send method."""

    def send(self, value):
        """Send ``value`` to the server.

        ``value`` can be a string object, a file-like object that supports
        a .read() method, or an iterable object that supports a .next()
        method.
        """
        # Based on python 2.6's httplib.HTTPConnection.send()
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()

        # send the data to the server. if we get a broken pipe, then close
        # the socket. we want to reconnect when somebody tries to send again.
        #
        # NOTE: we DO propagate the error, though, because we cannot simply
        # ignore the error... the caller will know if they can retry.
        if self.debuglevel > 0:
            print("send:", repr(value))
        try:
            blocksize = 8192
            if hasattr(value, 'read'):
                # File-like object: stream it in fixed-size chunks.
                if hasattr(value, 'seek'):
                    value.seek(0)
                if self.debuglevel > 0:
                    print("sendIng a read()able")
                data = value.read(blocksize)
                while data:
                    self.sock.sendall(data)
                    data = value.read(blocksize)
            elif hasattr(value, 'next'):
                # Iterable exposing a py2-style .next() method (e.g.
                # multipart_yielder): send each block it produces.
                # NOTE(review): plain py3 iterators expose only __next__ and
                # fall through to sendall below — confirm intended callers
                # always provide a .next() method.
                if hasattr(value, 'reset'):
                    value.reset()
                if self.debuglevel > 0:
                    print("sendIng an iterable")
                for data in value:
                    self.sock.sendall(data)
            else:
                self.sock.sendall(value)
        except socket.error:
            e = sys.exc_info()[1]
            # BUGFIX: on Python 3 socket.error is OSError and is not
            # indexable, so the original `e[0] == 32` raised TypeError and
            # masked the real error. Use the errno attribute instead
            # (available on py2.6+ and py3).
            if getattr(e, 'errno', None) == 32:  # Broken pipe
                self.close()
            raise
|
||||
|
||||
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
    """Subclass of `httplib.HTTPConnection` that overrides the `send()` method
    to support iterable body objects (via the `_StreamingHTTPMixin` above)."""
|
||||
|
||||
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
    """Redirect handler that correctly re-issues redirected POST requests.

    Python 2.5's stock HTTPRedirectHandler kept the Content-Type and
    Content-Length headers when requesting the new resource even though
    the original body is not preserved; this subclass strips them.
    """

    # Run ahead of the stock redirect handler.
    handler_order = urllib2.HTTPRedirectHandler.handler_order - 1

    # From python2.6 urllib2's HTTPRedirectHandler
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request for a permissible redirect, else raise.

        Called by the http_error_30x methods when a redirection response
        is received. Raises HTTPError when the method/status combination
        must not be transparently redirected.
        """
        method = req.get_method()
        follow = ((method in ("GET", "HEAD") and code in (301, 302, 303, 307))
                  or (method == "POST" and code in (301, 302, 303)))
        if not follow:
            raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to a POST
        # MUST NOT cause a redirection without confirmation from the user
        # (of urllib2, in this case). In practice, essentially all clients
        # do redirect in this case, so we do the same.
        # be conciliant with URIs containing a space
        location = newurl.replace(' ', '%20')
        # Drop body-describing headers: the body itself is not re-sent.
        dropped = ("content-length", "content-type")
        forwarded = dict((name, val) for name, val in req.headers.items()
                         if name.lower() not in dropped)
        return urllib2.Request(location,
                               headers=forwarded,
                               origin_req_host=req.get_origin_req_host(),
                               unverifiable=True)
|
||||
|
||||
class StreamingHTTPHandler(urllib2.HTTPHandler):
    """HTTP handler that opens connections through
    `StreamingHTTPConnection`, enabling streamed request bodies."""

    # Take precedence over the stock urllib2 HTTP handler.
    handler_order = urllib2.HTTPHandler.handler_order - 1

    def http_open(self, req):
        """Open a StreamingHTTPConnection for the given request"""
        return self.do_open(StreamingHTTPConnection, req)

    def http_request(self, req):
        """Handle a HTTP request; a streamed (file-like or iterable)
        body must carry an explicit Content-Length header."""
        if req.has_data():
            body = req.get_data()
            streamed = hasattr(body, 'read') or hasattr(body, 'next')
            if streamed and not req.has_header('Content-length'):
                raise ValueError(
                    "No Content-Length specified for iterable body")
        return urllib2.HTTPHandler.do_request_(self, req)
|
||||
|
||||
# The HTTPS variants exist only when the compat httplib supports HTTPS.
if hasattr(httplib, 'HTTPS'):
    class StreamingHTTPSConnection(_StreamingHTTPMixin,
                                   httplib.HTTPSConnection):
        """Subclass of `httplib.HTTPSConnection` that overrides the `send()`
        method to support iterable body objects"""

    class StreamingHTTPSHandler(urllib2.HTTPSHandler):
        """Subclass of `urllib2.HTTPSHandler` that uses
        StreamingHTTPSConnection as its http connection class."""

        # Take precedence over the stock urllib2 HTTPS handler.
        handler_order = urllib2.HTTPSHandler.handler_order - 1

        def https_open(self, req):
            # Open a StreamingHTTPSConnection for the given request.
            return self.do_open(StreamingHTTPSConnection, req)

        def https_request(self, req):
            # Make sure that if we're using an iterable object as the request
            # body, that we've also specified Content-Length
            if req.has_data():
                data = req.get_data()
                if hasattr(data, 'read') or hasattr(data, 'next'):
                    if not req.has_header('Content-length'):
                        raise ValueError(
                            "No Content-Length specified for iterable body")
            return urllib2.HTTPSHandler.do_request_(self, req)
|
||||
|
||||
|
||||
def get_handlers():
    """Return the streaming handler classes for this build: the HTTP and
    redirect handlers always, plus the HTTPS handler when available."""
    if hasattr(httplib, "HTTPS"):
        return [StreamingHTTPHandler, StreamingHTTPRedirectHandler,
                StreamingHTTPSHandler]
    return [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
|
||||
|
||||
def register_openers():
    """Register the streaming http handlers in the global urllib2 default
    opener object, and return the created OpenerDirector object."""
    streaming_opener = urllib2.build_opener(*get_handlers())
    urllib2.install_opener(streaming_opener)
    return streaming_opener
|
59
lib/cloudinary/search.py
Normal file
59
lib/cloudinary/search.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
import json
|
||||
from copy import deepcopy
|
||||
from . import api
|
||||
|
||||
|
||||
class Search:
    """Fluent builder for Cloudinary search queries.

    Every setter returns ``self`` so calls can be chained; ``execute``
    posts the accumulated query to the Admin API search endpoint.
    """

    def __init__(self):
        # Accumulated query fields, serialized verbatim by to_json().
        self.query = {}

    def expression(self, value):
        """Specify the search query expression."""
        return self._put("expression", value)

    def max_results(self, value):
        """Set the max results to return"""
        return self._put("max_results", value)

    def next_cursor(self, value):
        """Get next page in the query using the ``next_cursor`` value from a previous invocation."""
        return self._put("next_cursor", value)

    def sort_by(self, field_name, direction=None):
        """Add a field to sort results by. If not provided, direction is ``desc``."""
        if direction is None:
            direction = 'desc'
        return self._add("sort_by", {field_name: direction})

    def aggregate(self, value):
        """Aggregate field."""
        return self._add("aggregate", value)

    def with_field(self, value):
        """Request an additional field in the result set."""
        return self._add("with_field", value)

    def to_json(self):
        """Serialize the current query as a JSON string."""
        return json.dumps(self.query)

    def execute(self, **options):
        """Execute the search and return results."""
        options["content_type"] = 'application/json'
        return api.call_json_api(
            'post', ['resources', 'search'], self.as_dict(), **options)

    def as_dict(self):
        """Return an independent (deep) copy of the accumulated query."""
        return deepcopy(self.query)

    def _put(self, name, value):
        # Scalar fields: a later call overwrites the previous value.
        self.query[name] = value
        return self

    def _add(self, name, value):
        # List fields: values accumulate in call order.
        self.query.setdefault(name, []).append(value)
        return self
|
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
|
@ -0,0 +1,43 @@
|
|||
<!DOCTYPE HTML>
<!-- Renders the page's own query string as a JSON object in the document
     body. Presumably served as the CORS upload-response landing page so a
     parent frame can read the upload result — confirm against the uploader. -->
<html>
<head>
<meta charset="utf-8">
</head>
<body>
<script>
/*
    json2.js
    2011-10-19

    Public Domain.

    NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.

    See http://www.JSON.org/js.html

    This code should be minified before deployment.
    See http://javascript.crockford.com/jsmin.html

    USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
    NOT CONTROL.
*/
// Vendored, pre-minified json2.js fallback: defines JSON.stringify/parse
// for old browsers that lack a native JSON object.
var JSON;if(!JSON){JSON={}}(function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];if(i&&typeof i==="object"&&typeof i.toJSON==="function"){i=i.toJSON(a)}if(typeof rep==="function"){i=rep.call(b,a,i)}switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i){return"null"}gap+=indent;h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c<f;c+=1){h[c]=str(c,i)||"null"}e=h.length===0?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]";gap=g;return e}if(rep&&typeof rep==="object"){f=rep.length;for(c=0;c<f;c+=1){if(typeof rep[c]==="string"){d=rep[c];e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}else{for(d in i){if(Object.prototype.hasOwnProperty.call(i,d)){e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}e=h.length===0?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}";gap=g;return e}}function quote(a){escapable.lastIndex=0;return escapable.test(a)?'"'+a.replace(escapable,function(a){var b=meta[a];return typeof b==="string"?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function f(a){return a<10?"0"+a:a}"use strict";if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(a){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(a){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;if(typeof JSON.stringify!=="function"){JSON.stringify=function(a,b,c){var d;gap="";indent="";if(typeof c==="number"){for(d=0;d<c;d+=1){indent+=" "}}else if(typeof c==="string"){indent=c}rep=b;if(b&&typeof b!=="function"&&(typeof b!=="object"||typeof b.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":a})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&typeof e==="object"){for(c in e){if(Object.prototype.hasOwnProperty.call(e,c)){d=walk(e,c);if(d!==undefined){e[c]=d}else{delete e[c]}}}}return reviver.call(a,b,e)}var j;text=String(text);cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}})()
/* end of json2.js */
;
// Turn a "key=value&key2=value2" query string into a JSON object string.
// Values are percent-decoded; keys are kept verbatim.
function parse(query) {
    var result = {};
    var params = query.split("&");
    for (var i = 0; i < params.length; i++) {
        var param = params[i].split("=");
        result[param[0]] = decodeURIComponent(param[1]);
    }
    return JSON.stringify(result);
}

// Write the parsed query string (minus the leading "?") as the page body,
// setting both textContent and the legacy IE innerText property.
document.body.textContent = document.body.innerText = parse(window.location.search.slice(1));

</script>
</body>
</html>
|
||||
|
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
// Vendored, pre-minified canvas-to-blob polyfill (blueimp): adds
// HTMLCanvasElement.prototype.toBlob and exposes window.dataURLtoBlob.
// Do not edit by hand — minified third-party code.
!function(t){"use strict";var e=t.HTMLCanvasElement&&t.HTMLCanvasElement.prototype,o=t.Blob&&function(){try{return Boolean(new Blob)}catch(t){return!1}}(),n=o&&t.Uint8Array&&function(){try{return 100===new Blob([new Uint8Array(100)]).size}catch(t){return!1}}(),r=t.BlobBuilder||t.WebKitBlobBuilder||t.MozBlobBuilder||t.MSBlobBuilder,a=/^data:((.*?)(;charset=.*?)?)(;base64)?,/,i=(o||r)&&t.atob&&t.ArrayBuffer&&t.Uint8Array&&function(t){var e,i,l,u,c,f,b,d,B;if(!(e=t.match(a)))throw new Error("invalid data URI");for(i=e[2]?e[1]:"text/plain"+(e[3]||";charset=US-ASCII"),l=!!e[4],u=t.slice(e[0].length),c=l?atob(u):decodeURIComponent(u),f=new ArrayBuffer(c.length),b=new Uint8Array(f),d=0;d<c.length;d+=1)b[d]=c.charCodeAt(d);return o?new Blob([n?b:f],{type:i}):((B=new r).append(f),B.getBlob(i))};t.HTMLCanvasElement&&!e.toBlob&&(e.mozGetAsFile?e.toBlob=function(t,o,n){var r=this;setTimeout(function(){t(n&&e.toDataURL&&i?i(r.toDataURL(o,n)):r.mozGetAsFile("blob",o))})}:e.toDataURL&&i&&(e.toBlob=function(t,e,o){var n=this;setTimeout(function(){t(i(n.toDataURL(e,o)))})})),"function"==typeof define&&define.amd?define(function(){return i}):"object"==typeof module&&module.exports?module.exports=i:t.dataURLtoBlob=i}(window);
//# sourceMappingURL=canvas-to-blob.min.js.map
|
4722
lib/cloudinary/static/js/jquery.cloudinary.js
Normal file
4722
lib/cloudinary/static/js/jquery.cloudinary.js
Normal file
File diff suppressed because it is too large
Load diff
326
lib/cloudinary/static/js/jquery.fileupload-image.js
vendored
Normal file
326
lib/cloudinary/static/js/jquery.fileupload-image.js
vendored
Normal file
|
@ -0,0 +1,326 @@
|
|||
// Vendored third-party file (blueimp jQuery File Upload) — keep in sync
// with upstream rather than patching locally.
/*
 * jQuery File Upload Image Preview & Resize Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2013, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* jshint nomen:false */
/* global define, require, window, Blob */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define([
            'jquery',
            'load-image',
            'load-image-meta',
            'load-image-scale',
            'load-image-exif',
            'canvas-to-blob',
            './jquery.fileupload-process'
        ], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(
            require('jquery'),
            require('blueimp-load-image/js/load-image'),
            require('blueimp-load-image/js/load-image-meta'),
            require('blueimp-load-image/js/load-image-scale'),
            require('blueimp-load-image/js/load-image-exif'),
            require('blueimp-canvas-to-blob'),
            require('./jquery.fileupload-process')
        );
    } else {
        // Browser globals:
        factory(
            window.jQuery,
            window.loadImage
        );
    }
}(function ($, loadImage) {
    'use strict';

    // Prepend to the default processQueue:
    $.blueimp.fileupload.prototype.options.processQueue.unshift(
        {
            action: 'loadImageMetaData',
            disableImageHead: '@',
            disableExif: '@',
            disableExifThumbnail: '@',
            disableExifSub: '@',
            disableExifGps: '@',
            disabled: '@disableImageMetaDataLoad'
        },
        {
            action: 'loadImage',
            // Use the action as prefix for the "@" options:
            prefix: true,
            fileTypes: '@',
            maxFileSize: '@',
            noRevoke: '@',
            disabled: '@disableImageLoad'
        },
        {
            action: 'resizeImage',
            // Use "image" as prefix for the "@" options:
            prefix: 'image',
            maxWidth: '@',
            maxHeight: '@',
            minWidth: '@',
            minHeight: '@',
            crop: '@',
            orientation: '@',
            forceResize: '@',
            disabled: '@disableImageResize'
        },
        {
            action: 'saveImage',
            quality: '@imageQuality',
            type: '@imageType',
            disabled: '@disableImageResize'
        },
        {
            action: 'saveImageMetaData',
            disabled: '@disableImageMetaDataSave'
        },
        {
            action: 'resizeImage',
            // Use "preview" as prefix for the "@" options:
            prefix: 'preview',
            maxWidth: '@',
            maxHeight: '@',
            minWidth: '@',
            minHeight: '@',
            crop: '@',
            orientation: '@',
            thumbnail: '@',
            canvas: '@',
            disabled: '@disableImagePreview'
        },
        {
            action: 'setImage',
            name: '@imagePreviewName',
            disabled: '@disableImagePreview'
        },
        {
            action: 'deleteImageReferences',
            disabled: '@disableImageReferencesDeletion'
        }
    );

    // The File Upload Resize plugin extends the fileupload widget
    // with image resize functionality:
    $.widget('blueimp.fileupload', $.blueimp.fileupload, {

        options: {
            // The regular expression for the types of images to load:
            // matched against the file type:
            loadImageFileTypes: /^image\/(gif|jpeg|png|svg\+xml)$/,
            // The maximum file size of images to load:
            loadImageMaxFileSize: 10000000, // 10MB
            // The maximum width of resized images:
            imageMaxWidth: 1920,
            // The maximum height of resized images:
            imageMaxHeight: 1080,
            // Defines the image orientation (1-8) or takes the orientation
            // value from Exif data if set to true:
            imageOrientation: false,
            // Define if resized images should be cropped or only scaled:
            imageCrop: false,
            // Disable the resize image functionality by default:
            disableImageResize: true,
            // The maximum width of the preview images:
            previewMaxWidth: 80,
            // The maximum height of the preview images:
            previewMaxHeight: 80,
            // Defines the preview orientation (1-8) or takes the orientation
            // value from Exif data if set to true:
            previewOrientation: true,
            // Create the preview using the Exif data thumbnail:
            previewThumbnail: true,
            // Define if preview images should be cropped or only scaled:
            previewCrop: false,
            // Define if preview images should be resized as canvas elements:
            previewCanvas: true
        },

        processActions: {

            // Loads the image given via data.files and data.index
            // as img element, if the browser supports the File API.
            // Accepts the options fileTypes (regular expression)
            // and maxFileSize (integer) to limit the files to load:
            loadImage: function (data, options) {
                if (options.disabled) {
                    return data;
                }
                var that = this,
                    file = data.files[data.index],
                    dfd = $.Deferred();
                // Returning `data` directly (rather than the promise) skips
                // the load for oversized, type-mismatched or unloadable files.
                if (($.type(options.maxFileSize) === 'number' &&
                        file.size > options.maxFileSize) ||
                        (options.fileTypes &&
                        !options.fileTypes.test(file.type)) ||
                        !loadImage(
                            file,
                            function (img) {
                                if (img.src) {
                                    data.img = img;
                                }
                                dfd.resolveWith(that, [data]);
                            },
                            options
                        )) {
                    return data;
                }
                return dfd.promise();
            },

            // Resizes the image given as data.canvas or data.img
            // and updates data.canvas or data.img with the resized image.
            // Also stores the resized image as preview property.
            // Accepts the options maxWidth, maxHeight, minWidth,
            // minHeight, canvas and crop:
            resizeImage: function (data, options) {
                if (options.disabled || !(data.canvas || data.img)) {
                    return data;
                }
                options = $.extend({canvas: true}, options);
                var that = this,
                    dfd = $.Deferred(),
                    img = (options.canvas && data.canvas) || data.img,
                    resolve = function (newImg) {
                        if (newImg && (newImg.width !== img.width ||
                                newImg.height !== img.height ||
                                options.forceResize)) {
                            data[newImg.getContext ? 'canvas' : 'img'] = newImg;
                        }
                        data.preview = newImg;
                        dfd.resolveWith(that, [data]);
                    },
                    thumbnail;
                if (data.exif) {
                    if (options.orientation === true) {
                        options.orientation = data.exif.get('Orientation');
                    }
                    if (options.thumbnail) {
                        thumbnail = data.exif.get('Thumbnail');
                        if (thumbnail) {
                            loadImage(thumbnail, resolve, options);
                            return dfd.promise();
                        }
                    }
                    // Prevent orienting the same image twice:
                    if (data.orientation) {
                        delete options.orientation;
                    } else {
                        data.orientation = options.orientation;
                    }
                }
                if (img) {
                    resolve(loadImage.scale(img, options));
                    return dfd.promise();
                }
                return data;
            },

            // Saves the processed image given as data.canvas
            // inplace at data.index of data.files:
            saveImage: function (data, options) {
                if (!data.canvas || options.disabled) {
                    return data;
                }
                var that = this,
                    file = data.files[data.index],
                    dfd = $.Deferred();
                if (data.canvas.toBlob) {
                    data.canvas.toBlob(
                        function (blob) {
                            if (!blob.name) {
                                if (file.type === blob.type) {
                                    blob.name = file.name;
                                } else if (file.name) {
                                    blob.name = file.name.replace(
                                        /\.\w+$/,
                                        '.' + blob.type.substr(6)
                                    );
                                }
                            }
                            // Don't restore invalid meta data:
                            if (file.type !== blob.type) {
                                delete data.imageHead;
                            }
                            // Store the created blob at the position
                            // of the original file in the files list:
                            data.files[data.index] = blob;
                            dfd.resolveWith(that, [data]);
                        },
                        options.type || file.type,
                        options.quality
                    );
                } else {
                    return data;
                }
                return dfd.promise();
            },

            // Parses Exif/meta data of the current file into `data`:
            loadImageMetaData: function (data, options) {
                if (options.disabled) {
                    return data;
                }
                var that = this,
                    dfd = $.Deferred();
                loadImage.parseMetaData(data.files[data.index], function (result) {
                    $.extend(data, result);
                    dfd.resolveWith(that, [data]);
                }, options);
                return dfd.promise();
            },

            // Re-attaches the original image head (meta data) to the
            // resized blob before upload:
            saveImageMetaData: function (data, options) {
                if (!(data.imageHead && data.canvas &&
                        data.canvas.toBlob && !options.disabled)) {
                    return data;
                }
                var file = data.files[data.index],
                    blob = new Blob([
                        data.imageHead,
                        // Resized images always have a head size of 20 bytes,
                        // including the JPEG marker and a minimal JFIF header:
                        this._blobSlice.call(file, 20)
                    ], {type: file.type});
                blob.name = file.name;
                data.files[data.index] = blob;
                return data;
            },

            // Sets the resized version of the image as a property of the
            // file object, must be called after "saveImage":
            setImage: function (data, options) {
                if (data.preview && !options.disabled) {
                    data.files[data.index][options.name || 'preview'] = data.preview;
                }
                return data;
            },

            // Frees the intermediate image objects once processing is done:
            deleteImageReferences: function (data, options) {
                if (!options.disabled) {
                    delete data.img;
                    delete data.canvas;
                    delete data.preview;
                    delete data.imageHead;
                }
                return data;
            }

        }

    });

}));
|
178
lib/cloudinary/static/js/jquery.fileupload-process.js
vendored
Normal file
178
lib/cloudinary/static/js/jquery.fileupload-process.js
vendored
Normal file
|
@ -0,0 +1,178 @@
|
|||
// Vendored third-party file (blueimp jQuery File Upload) — keep in sync
// with upstream rather than patching locally.
/*
 * jQuery File Upload Processing Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2012, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* jshint nomen:false */
/* global define, require, window */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define([
            'jquery',
            './jquery.fileupload'
        ], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(
            require('jquery'),
            require('./jquery.fileupload')
        );
    } else {
        // Browser globals:
        factory(
            window.jQuery
        );
    }
}(function ($) {
    'use strict';

    // Saved so the overridden `add` option below can delegate to it:
    var originalAdd = $.blueimp.fileupload.prototype.options.add;

    // The File Upload Processing plugin extends the fileupload widget
    // with file processing functionality:
    $.widget('blueimp.fileupload', $.blueimp.fileupload, {

        options: {
            // The list of processing actions:
            processQueue: [
                /*
                {
                    action: 'log',
                    type: 'debug'
                }
                */
            ],
            add: function (e, data) {
                var $this = $(this);
                data.process(function () {
                    return $this.fileupload('process', data);
                });
                originalAdd.call(this, e, data);
            }
        },

        processActions: {
            /*
            log: function (data, options) {
                console[options.type](
                    'Processing "' + data.files[data.index].name + '"'
                );
            }
            */
        },

        // Runs the configured processQueue actions for one file; actions
        // with `always: true` run even after an earlier rejection.
        _processFile: function (data, originalData) {
            var that = this,
                dfd = $.Deferred().resolveWith(that, [data]),
                chain = dfd.promise();
            this._trigger('process', null, data);
            $.each(data.processQueue, function (i, settings) {
                var func = function (data) {
                    if (originalData.errorThrown) {
                        return $.Deferred()
                            .rejectWith(that, [originalData]).promise();
                    }
                    return that.processActions[settings.action].call(
                        that,
                        data,
                        settings
                    );
                };
                chain = chain.then(func, settings.always && func);
            });
            chain
                .done(function () {
                    that._trigger('processdone', null, data);
                    that._trigger('processalways', null, data);
                })
                .fail(function () {
                    that._trigger('processfail', null, data);
                    that._trigger('processalways', null, data);
                });
            return chain;
        },

        // Replaces the settings of each processQueue item that
        // are strings starting with an "@", using the remaining
        // substring as key for the option map,
        // e.g. "@autoUpload" is replaced with options.autoUpload:
        _transformProcessQueue: function (options) {
            var processQueue = [];
            $.each(options.processQueue, function () {
                var settings = {},
                    action = this.action,
                    prefix = this.prefix === true ? action : this.prefix;
                $.each(this, function (key, value) {
                    if ($.type(value) === 'string' &&
                            value.charAt(0) === '@') {
                        settings[key] = options[
                            value.slice(1) || (prefix ? prefix +
                                key.charAt(0).toUpperCase() + key.slice(1) : key)
                        ];
                    } else {
                        settings[key] = value;
                    }

                });
                processQueue.push(settings);
            });
            options.processQueue = processQueue;
        },

        // Returns the number of files currently in the processsing queue:
        processing: function () {
            return this._processing;
        },

        // Processes the files given as files property of the data parameter,
        // returns a Promise object that allows to bind callbacks:
        process: function (data) {
            var that = this,
                options = $.extend({}, this.options, data);
            if (options.processQueue && options.processQueue.length) {
                this._transformProcessQueue(options);
                if (this._processing === 0) {
                    this._trigger('processstart');
                }
                $.each(data.files, function (index) {
                    var opts = index ? $.extend({}, options) : options,
                        func = function () {
                            if (data.errorThrown) {
                                return $.Deferred()
                                    .rejectWith(that, [data]).promise();
                            }
                            return that._processFile(opts, data);
                        };
                    opts.index = index;
                    that._processing += 1;
                    that._processingQueue = that._processingQueue.then(func, func)
                        .always(function () {
                            that._processing -= 1;
                            if (that._processing === 0) {
                                that._trigger('processstop');
                            }
                        });
                });
            }
            return this._processingQueue;
        },

        _create: function () {
            this._super();
            this._processing = 0;
            this._processingQueue = $.Deferred().resolveWith(this)
                .promise();
        }

    });

}));
|
125
lib/cloudinary/static/js/jquery.fileupload-validate.js
vendored
Normal file
125
lib/cloudinary/static/js/jquery.fileupload-validate.js
vendored
Normal file
|
@ -0,0 +1,125 @@
|
|||
// Vendored third-party file (blueimp jQuery File Upload) — keep in sync
// with upstream rather than patching locally.
/*
 * jQuery File Upload Validation Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2013, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* global define, require, window */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define([
            'jquery',
            './jquery.fileupload-process'
        ], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(
            require('jquery'),
            require('./jquery.fileupload-process')
        );
    } else {
        // Browser globals:
        factory(
            window.jQuery
        );
    }
}(function ($) {
    'use strict';

    // Append to the default processQueue:
    $.blueimp.fileupload.prototype.options.processQueue.push(
        {
            action: 'validate',
            // Always trigger this action,
            // even if the previous action was rejected:
            always: true,
            // Options taken from the global options map:
            acceptFileTypes: '@',
            maxFileSize: '@',
            minFileSize: '@',
            maxNumberOfFiles: '@',
            disabled: '@disableValidation'
        }
    );

    // The File Upload Validation plugin extends the fileupload widget
    // with file validation functionality:
    $.widget('blueimp.fileupload', $.blueimp.fileupload, {

        options: {
            /*
            // The regular expression for allowed file types, matches
            // against either file type or file name:
            acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
            // The maximum allowed file size in bytes:
            maxFileSize: 10000000, // 10 MB
            // The minimum allowed file size in bytes:
            minFileSize: undefined, // No minimal file size
            // The limit of files to be uploaded:
            maxNumberOfFiles: 10,
            */

            // Function returning the current number of files,
            // has to be overriden for maxNumberOfFiles validation:
            getNumberOfFiles: $.noop,

            // Error and info messages:
            messages: {
                maxNumberOfFiles: 'Maximum number of files exceeded',
                acceptFileTypes: 'File type not allowed',
                maxFileSize: 'File is too large',
                minFileSize: 'File is too small'
            }
        },

        processActions: {

            // Validates the current file against the configured limits;
            // sets file.error (and rejects) on the first failed check.
            validate: function (data, options) {
                if (options.disabled) {
                    return data;
                }
                var dfd = $.Deferred(),
                    settings = this.options,
                    file = data.files[data.index],
                    fileSize;
                if (options.minFileSize || options.maxFileSize) {
                    fileSize = file.size;
                }
                if ($.type(options.maxNumberOfFiles) === 'number' &&
                        (settings.getNumberOfFiles() || 0) + data.files.length >
                            options.maxNumberOfFiles) {
                    file.error = settings.i18n('maxNumberOfFiles');
                } else if (options.acceptFileTypes &&
                        !(options.acceptFileTypes.test(file.type) ||
                        options.acceptFileTypes.test(file.name))) {
                    file.error = settings.i18n('acceptFileTypes');
                } else if (fileSize > options.maxFileSize) {
                    file.error = settings.i18n('maxFileSize');
                } else if ($.type(fileSize) === 'number' &&
                        fileSize < options.minFileSize) {
                    file.error = settings.i18n('minFileSize');
                } else {
                    delete file.error;
                }
                // A single invalid file marks the whole selection as errored:
                if (file.error || data.files.error) {
                    data.files.error = true;
                    dfd.rejectWith(this, [data]);
                } else {
                    dfd.resolveWith(this, [data]);
                }
                return dfd.promise();
            }

        }

    });

}));
|
1482
lib/cloudinary/static/js/jquery.fileupload.js
vendored
Normal file
1482
lib/cloudinary/static/js/jquery.fileupload.js
vendored
Normal file
File diff suppressed because it is too large
Load diff
224
lib/cloudinary/static/js/jquery.iframe-transport.js
Normal file
224
lib/cloudinary/static/js/jquery.iframe-transport.js
Normal file
|
@ -0,0 +1,224 @@
|
|||
/*
|
||||
* jQuery Iframe Transport Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2011, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* global define, require, window, document, JSON */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define(['jquery'], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(require('jquery'));
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(window.jQuery);
|
||||
}
|
||||
}(function ($) {
|
||||
'use strict';
|
||||
|
||||
// Helper variable to create unique names for the transport iframes:
|
||||
var counter = 0,
|
||||
jsonAPI = $,
|
||||
jsonParse = 'parseJSON';
|
||||
|
||||
if ('JSON' in window && 'parse' in JSON) {
|
||||
jsonAPI = JSON;
|
||||
jsonParse = 'parse';
|
||||
}
|
||||
|
||||
// The iframe transport accepts four additional options:
|
||||
// options.fileInput: a jQuery collection of file input fields
|
||||
// options.paramName: the parameter name for the file form data,
|
||||
// overrides the name property of the file input field(s),
|
||||
// can be a string or an array of strings.
|
||||
// options.formData: an array of objects with name and value properties,
|
||||
// equivalent to the return data of .serializeArray(), e.g.:
|
||||
// [{name: 'a', value: 1}, {name: 'b', value: 2}]
|
||||
// options.initialIframeSrc: the URL of the initial iframe src,
|
||||
// by default set to "javascript:false;"
|
||||
$.ajaxTransport('iframe', function (options) {
|
||||
if (options.async) {
|
||||
// javascript:false as initial iframe src
|
||||
// prevents warning popups on HTTPS in IE6:
|
||||
/*jshint scripturl: true */
|
||||
var initialIframeSrc = options.initialIframeSrc || 'javascript:false;',
|
||||
/*jshint scripturl: false */
|
||||
form,
|
||||
iframe,
|
||||
addParamChar;
|
||||
return {
|
||||
send: function (_, completeCallback) {
|
||||
form = $('<form style="display:none;"></form>');
|
||||
form.attr('accept-charset', options.formAcceptCharset);
|
||||
addParamChar = /\?/.test(options.url) ? '&' : '?';
|
||||
// XDomainRequest only supports GET and POST:
|
||||
if (options.type === 'DELETE') {
|
||||
options.url = options.url + addParamChar + '_method=DELETE';
|
||||
options.type = 'POST';
|
||||
} else if (options.type === 'PUT') {
|
||||
options.url = options.url + addParamChar + '_method=PUT';
|
||||
options.type = 'POST';
|
||||
} else if (options.type === 'PATCH') {
|
||||
options.url = options.url + addParamChar + '_method=PATCH';
|
||||
options.type = 'POST';
|
||||
}
|
||||
// IE versions below IE8 cannot set the name property of
|
||||
// elements that have already been added to the DOM,
|
||||
// so we set the name along with the iframe HTML markup:
|
||||
counter += 1;
|
||||
iframe = $(
|
||||
'<iframe src="' + initialIframeSrc +
|
||||
'" name="iframe-transport-' + counter + '"></iframe>'
|
||||
).bind('load', function () {
|
||||
var fileInputClones,
|
||||
paramNames = $.isArray(options.paramName) ?
|
||||
options.paramName : [options.paramName];
|
||||
iframe
|
||||
.unbind('load')
|
||||
.bind('load', function () {
|
||||
var response;
|
||||
// Wrap in a try/catch block to catch exceptions thrown
|
||||
// when trying to access cross-domain iframe contents:
|
||||
try {
|
||||
response = iframe.contents();
|
||||
// Google Chrome and Firefox do not throw an
|
||||
// exception when calling iframe.contents() on
|
||||
// cross-domain requests, so we unify the response:
|
||||
if (!response.length || !response[0].firstChild) {
|
||||
throw new Error();
|
||||
}
|
||||
} catch (e) {
|
||||
response = undefined;
|
||||
}
|
||||
// The complete callback returns the
|
||||
// iframe content document as response object:
|
||||
completeCallback(
|
||||
200,
|
||||
'success',
|
||||
{'iframe': response}
|
||||
);
|
||||
// Fix for IE endless progress bar activity bug
|
||||
// (happens on form submits to iframe targets):
|
||||
$('<iframe src="' + initialIframeSrc + '"></iframe>')
|
||||
.appendTo(form);
|
||||
window.setTimeout(function () {
|
||||
// Removing the form in a setTimeout call
|
||||
// allows Chrome's developer tools to display
|
||||
// the response result
|
||||
form.remove();
|
||||
}, 0);
|
||||
});
|
||||
form
|
||||
.prop('target', iframe.prop('name'))
|
||||
.prop('action', options.url)
|
||||
.prop('method', options.type);
|
||||
if (options.formData) {
|
||||
$.each(options.formData, function (index, field) {
|
||||
$('<input type="hidden"/>')
|
||||
.prop('name', field.name)
|
||||
.val(field.value)
|
||||
.appendTo(form);
|
||||
});
|
||||
}
|
||||
if (options.fileInput && options.fileInput.length &&
|
||||
options.type === 'POST') {
|
||||
fileInputClones = options.fileInput.clone();
|
||||
// Insert a clone for each file input field:
|
||||
options.fileInput.after(function (index) {
|
||||
return fileInputClones[index];
|
||||
});
|
||||
if (options.paramName) {
|
||||
options.fileInput.each(function (index) {
|
||||
$(this).prop(
|
||||
'name',
|
||||
paramNames[index] || options.paramName
|
||||
);
|
||||
});
|
||||
}
|
||||
// Appending the file input fields to the hidden form
|
||||
// removes them from their original location:
|
||||
form
|
||||
.append(options.fileInput)
|
||||
.prop('enctype', 'multipart/form-data')
|
||||
// enctype must be set as encoding for IE:
|
||||
.prop('encoding', 'multipart/form-data');
|
||||
// Remove the HTML5 form attribute from the input(s):
|
||||
options.fileInput.removeAttr('form');
|
||||
}
|
||||
form.submit();
|
||||
// Insert the file input fields at their original location
|
||||
// by replacing the clones with the originals:
|
||||
if (fileInputClones && fileInputClones.length) {
|
||||
options.fileInput.each(function (index, input) {
|
||||
var clone = $(fileInputClones[index]);
|
||||
// Restore the original name and form properties:
|
||||
$(input)
|
||||
.prop('name', clone.prop('name'))
|
||||
.attr('form', clone.attr('form'));
|
||||
clone.replaceWith(input);
|
||||
});
|
||||
}
|
||||
});
|
||||
form.append(iframe).appendTo(document.body);
|
||||
},
|
||||
abort: function () {
|
||||
if (iframe) {
|
||||
// javascript:false as iframe src aborts the request
|
||||
// and prevents warning popups on HTTPS in IE6.
|
||||
// concat is used to avoid the "Script URL" JSLint error:
|
||||
iframe
|
||||
.unbind('load')
|
||||
.prop('src', initialIframeSrc);
|
||||
}
|
||||
if (form) {
|
||||
form.remove();
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
// The iframe transport returns the iframe content document as response.
|
||||
// The following adds converters from iframe to text, json, html, xml
|
||||
// and script.
|
||||
// Please note that the Content-Type for JSON responses has to be text/plain
|
||||
// or text/html, if the browser doesn't include application/json in the
|
||||
// Accept header, else IE will show a download dialog.
|
||||
// The Content-Type for XML responses on the other hand has to be always
|
||||
// application/xml or text/xml, so IE properly parses the XML response.
|
||||
// See also
|
||||
// https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation
|
||||
    // Converters from the raw iframe contents document (the transport's
    // response object) to the standard jQuery dataTypes:
    $.ajaxSetup({
        converters: {
            'iframe text': function (iframe) {
                return iframe && $(iframe[0].body).text();
            },
            'iframe json': function (iframe) {
                // jsonAPI/jsonParse resolve to JSON.parse when the native
                // JSON object is available, else to $.parseJSON:
                return iframe && jsonAPI[jsonParse]($(iframe[0].body).text());
            },
            'iframe html': function (iframe) {
                return iframe && $(iframe[0].body).html();
            },
            'iframe xml': function (iframe) {
                var xmlDoc = iframe && iframe[0];
                // Use the document directly when it is already an XML
                // document; otherwise re-parse the serialized markup
                // (IE exposes it via XMLDocument.xml):
                return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc :
                    $.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) ||
                        $(xmlDoc.body).html());
            },
            'iframe script': function (iframe) {
                // Evaluate the response body as script in the global scope:
                return iframe && $.globalEval($(iframe[0].body).text());
            }
        }
    });
|
||||
|
||||
}));
|
572
lib/cloudinary/static/js/jquery.ui.widget.js
vendored
Normal file
572
lib/cloudinary/static/js/jquery.ui.widget.js
vendored
Normal file
|
@ -0,0 +1,572 @@
|
|||
/*! jQuery UI - v1.11.4+CommonJS - 2015-08-28
|
||||
* http://jqueryui.com
|
||||
* Includes: widget.js
|
||||
* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */
|
||||
|
||||
(function( factory ) {
|
||||
if ( typeof define === "function" && define.amd ) {
|
||||
|
||||
// AMD. Register as an anonymous module.
|
||||
define([ "jquery" ], factory );
|
||||
|
||||
} else if ( typeof exports === "object" ) {
|
||||
|
||||
// Node/CommonJS
|
||||
factory( require( "jquery" ) );
|
||||
|
||||
} else {
|
||||
|
||||
// Browser globals
|
||||
factory( jQuery );
|
||||
}
|
||||
}(function( $ ) {
|
||||
/*!
|
||||
* jQuery UI Widget 1.11.4
|
||||
* http://jqueryui.com
|
||||
*
|
||||
* Copyright jQuery Foundation and other contributors
|
||||
* Released under the MIT license.
|
||||
* http://jquery.org/license
|
||||
*
|
||||
* http://api.jqueryui.com/jQuery.widget/
|
||||
*/
|
||||
|
||||
|
||||
var widget_uuid = 0,
|
||||
widget_slice = Array.prototype.slice;
|
||||
|
||||
$.cleanData = (function( orig ) {
|
||||
return function( elems ) {
|
||||
var events, elem, i;
|
||||
for ( i = 0; (elem = elems[i]) != null; i++ ) {
|
||||
try {
|
||||
|
||||
// Only trigger remove when necessary to save time
|
||||
events = $._data( elem, "events" );
|
||||
if ( events && events.remove ) {
|
||||
$( elem ).triggerHandler( "remove" );
|
||||
}
|
||||
|
||||
// http://bugs.jquery.com/ticket/8235
|
||||
} catch ( e ) {}
|
||||
}
|
||||
orig( elems );
|
||||
};
|
||||
})( $.cleanData );
|
||||
|
||||
$.widget = function( name, base, prototype ) {
|
||||
var fullName, existingConstructor, constructor, basePrototype,
|
||||
// proxiedPrototype allows the provided prototype to remain unmodified
|
||||
// so that it can be used as a mixin for multiple widgets (#8876)
|
||||
proxiedPrototype = {},
|
||||
namespace = name.split( "." )[ 0 ];
|
||||
|
||||
name = name.split( "." )[ 1 ];
|
||||
fullName = namespace + "-" + name;
|
||||
|
||||
if ( !prototype ) {
|
||||
prototype = base;
|
||||
base = $.Widget;
|
||||
}
|
||||
|
||||
// create selector for plugin
|
||||
$.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) {
|
||||
return !!$.data( elem, fullName );
|
||||
};
|
||||
|
||||
$[ namespace ] = $[ namespace ] || {};
|
||||
existingConstructor = $[ namespace ][ name ];
|
||||
constructor = $[ namespace ][ name ] = function( options, element ) {
|
||||
// allow instantiation without "new" keyword
|
||||
if ( !this._createWidget ) {
|
||||
return new constructor( options, element );
|
||||
}
|
||||
|
||||
// allow instantiation without initializing for simple inheritance
|
||||
// must use "new" keyword (the code above always passes args)
|
||||
if ( arguments.length ) {
|
||||
this._createWidget( options, element );
|
||||
}
|
||||
};
|
||||
// extend with the existing constructor to carry over any static properties
|
||||
$.extend( constructor, existingConstructor, {
|
||||
version: prototype.version,
|
||||
// copy the object used to create the prototype in case we need to
|
||||
// redefine the widget later
|
||||
_proto: $.extend( {}, prototype ),
|
||||
// track widgets that inherit from this widget in case this widget is
|
||||
// redefined after a widget inherits from it
|
||||
_childConstructors: []
|
||||
});
|
||||
|
||||
basePrototype = new base();
|
||||
// we need to make the options hash a property directly on the new instance
|
||||
// otherwise we'll modify the options hash on the prototype that we're
|
||||
// inheriting from
|
||||
basePrototype.options = $.widget.extend( {}, basePrototype.options );
|
||||
$.each( prototype, function( prop, value ) {
|
||||
if ( !$.isFunction( value ) ) {
|
||||
proxiedPrototype[ prop ] = value;
|
||||
return;
|
||||
}
|
||||
proxiedPrototype[ prop ] = (function() {
|
||||
var _super = function() {
|
||||
return base.prototype[ prop ].apply( this, arguments );
|
||||
},
|
||||
_superApply = function( args ) {
|
||||
return base.prototype[ prop ].apply( this, args );
|
||||
};
|
||||
return function() {
|
||||
var __super = this._super,
|
||||
__superApply = this._superApply,
|
||||
returnValue;
|
||||
|
||||
this._super = _super;
|
||||
this._superApply = _superApply;
|
||||
|
||||
returnValue = value.apply( this, arguments );
|
||||
|
||||
this._super = __super;
|
||||
this._superApply = __superApply;
|
||||
|
||||
return returnValue;
|
||||
};
|
||||
})();
|
||||
});
|
||||
constructor.prototype = $.widget.extend( basePrototype, {
|
||||
// TODO: remove support for widgetEventPrefix
|
||||
// always use the name + a colon as the prefix, e.g., draggable:start
|
||||
// don't prefix for widgets that aren't DOM-based
|
||||
widgetEventPrefix: existingConstructor ? (basePrototype.widgetEventPrefix || name) : name
|
||||
}, proxiedPrototype, {
|
||||
constructor: constructor,
|
||||
namespace: namespace,
|
||||
widgetName: name,
|
||||
widgetFullName: fullName
|
||||
});
|
||||
|
||||
// If this widget is being redefined then we need to find all widgets that
|
||||
// are inheriting from it and redefine all of them so that they inherit from
|
||||
// the new version of this widget. We're essentially trying to replace one
|
||||
// level in the prototype chain.
|
||||
if ( existingConstructor ) {
|
||||
$.each( existingConstructor._childConstructors, function( i, child ) {
|
||||
var childPrototype = child.prototype;
|
||||
|
||||
// redefine the child widget using the same prototype that was
|
||||
// originally used, but inherit from the new version of the base
|
||||
$.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, child._proto );
|
||||
});
|
||||
// remove the list of existing child constructors from the old constructor
|
||||
// so the old child constructors can be garbage collected
|
||||
delete existingConstructor._childConstructors;
|
||||
} else {
|
||||
base._childConstructors.push( constructor );
|
||||
}
|
||||
|
||||
$.widget.bridge( name, constructor );
|
||||
|
||||
return constructor;
|
||||
};
|
||||
|
||||
// Deep-extend helper used by the widget factory. Unlike $.extend( true, ... )
// it only deep-copies plain objects, skips undefined values, and only copies
// own properties of each input object.
$.widget.extend = function( target ) {
	var input = widget_slice.call( arguments, 1 ),
		inputIndex = 0,
		inputLength = input.length,
		key,
		value;
	for ( ; inputIndex < inputLength; inputIndex++ ) {
		for ( key in input[ inputIndex ] ) {
			value = input[ inputIndex ][ key ];
			if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) {

				// Clone objects
				if ( $.isPlainObject( value ) ) {
					target[ key ] = $.isPlainObject( target[ key ] ) ?
						$.widget.extend( {}, target[ key ], value ) :

						// Don't extend strings, arrays, etc. with objects
						$.widget.extend( {}, value );

				// Copy everything else by reference
				} else {
					target[ key ] = value;
				}
			}
		}
	}
	return target;
};
|
||||
|
||||
$.widget.bridge = function( name, object ) {
|
||||
var fullName = object.prototype.widgetFullName || name;
|
||||
$.fn[ name ] = function( options ) {
|
||||
var isMethodCall = typeof options === "string",
|
||||
args = widget_slice.call( arguments, 1 ),
|
||||
returnValue = this;
|
||||
|
||||
if ( isMethodCall ) {
|
||||
this.each(function() {
|
||||
var methodValue,
|
||||
instance = $.data( this, fullName );
|
||||
if ( options === "instance" ) {
|
||||
returnValue = instance;
|
||||
return false;
|
||||
}
|
||||
if ( !instance ) {
|
||||
return $.error( "cannot call methods on " + name + " prior to initialization; " +
|
||||
"attempted to call method '" + options + "'" );
|
||||
}
|
||||
if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) {
|
||||
return $.error( "no such method '" + options + "' for " + name + " widget instance" );
|
||||
}
|
||||
methodValue = instance[ options ].apply( instance, args );
|
||||
if ( methodValue !== instance && methodValue !== undefined ) {
|
||||
returnValue = methodValue && methodValue.jquery ?
|
||||
returnValue.pushStack( methodValue.get() ) :
|
||||
methodValue;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
||||
// Allow multiple hashes to be passed on init
|
||||
if ( args.length ) {
|
||||
options = $.widget.extend.apply( null, [ options ].concat(args) );
|
||||
}
|
||||
|
||||
this.each(function() {
|
||||
var instance = $.data( this, fullName );
|
||||
if ( instance ) {
|
||||
instance.option( options || {} );
|
||||
if ( instance._init ) {
|
||||
instance._init();
|
||||
}
|
||||
} else {
|
||||
$.data( this, fullName, new object( options, this ) );
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return returnValue;
|
||||
};
|
||||
};
|
||||
|
||||
$.Widget = function( /* options, element */ ) {};
|
||||
$.Widget._childConstructors = [];
|
||||
|
||||
$.Widget.prototype = {
|
||||
widgetName: "widget",
|
||||
widgetEventPrefix: "",
|
||||
defaultElement: "<div>",
|
||||
options: {
|
||||
disabled: false,
|
||||
|
||||
// callbacks
|
||||
create: null
|
||||
},
|
||||
_createWidget: function( options, element ) {
|
||||
element = $( element || this.defaultElement || this )[ 0 ];
|
||||
this.element = $( element );
|
||||
this.uuid = widget_uuid++;
|
||||
this.eventNamespace = "." + this.widgetName + this.uuid;
|
||||
|
||||
this.bindings = $();
|
||||
this.hoverable = $();
|
||||
this.focusable = $();
|
||||
|
||||
if ( element !== this ) {
|
||||
$.data( element, this.widgetFullName, this );
|
||||
this._on( true, this.element, {
|
||||
remove: function( event ) {
|
||||
if ( event.target === element ) {
|
||||
this.destroy();
|
||||
}
|
||||
}
|
||||
});
|
||||
this.document = $( element.style ?
|
||||
// element within the document
|
||||
element.ownerDocument :
|
||||
// element is window or document
|
||||
element.document || element );
|
||||
this.window = $( this.document[0].defaultView || this.document[0].parentWindow );
|
||||
}
|
||||
|
||||
this.options = $.widget.extend( {},
|
||||
this.options,
|
||||
this._getCreateOptions(),
|
||||
options );
|
||||
|
||||
this._create();
|
||||
this._trigger( "create", null, this._getCreateEventData() );
|
||||
this._init();
|
||||
},
|
||||
_getCreateOptions: $.noop,
|
||||
_getCreateEventData: $.noop,
|
||||
_create: $.noop,
|
||||
_init: $.noop,
|
||||
|
||||
destroy: function() {
|
||||
this._destroy();
|
||||
// we can probably remove the unbind calls in 2.0
|
||||
// all event bindings should go through this._on()
|
||||
this.element
|
||||
.unbind( this.eventNamespace )
|
||||
.removeData( this.widgetFullName )
|
||||
// support: jquery <1.6.3
|
||||
// http://bugs.jquery.com/ticket/9413
|
||||
.removeData( $.camelCase( this.widgetFullName ) );
|
||||
this.widget()
|
||||
.unbind( this.eventNamespace )
|
||||
.removeAttr( "aria-disabled" )
|
||||
.removeClass(
|
||||
this.widgetFullName + "-disabled " +
|
||||
"ui-state-disabled" );
|
||||
|
||||
// clean up events and states
|
||||
this.bindings.unbind( this.eventNamespace );
|
||||
this.hoverable.removeClass( "ui-state-hover" );
|
||||
this.focusable.removeClass( "ui-state-focus" );
|
||||
},
|
||||
_destroy: $.noop,
|
||||
|
||||
widget: function() {
|
||||
return this.element;
|
||||
},
|
||||
|
||||
option: function( key, value ) {
|
||||
var options = key,
|
||||
parts,
|
||||
curOption,
|
||||
i;
|
||||
|
||||
if ( arguments.length === 0 ) {
|
||||
// don't return a reference to the internal hash
|
||||
return $.widget.extend( {}, this.options );
|
||||
}
|
||||
|
||||
if ( typeof key === "string" ) {
|
||||
// handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } }
|
||||
options = {};
|
||||
parts = key.split( "." );
|
||||
key = parts.shift();
|
||||
if ( parts.length ) {
|
||||
curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] );
|
||||
for ( i = 0; i < parts.length - 1; i++ ) {
|
||||
curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {};
|
||||
curOption = curOption[ parts[ i ] ];
|
||||
}
|
||||
key = parts.pop();
|
||||
if ( arguments.length === 1 ) {
|
||||
return curOption[ key ] === undefined ? null : curOption[ key ];
|
||||
}
|
||||
curOption[ key ] = value;
|
||||
} else {
|
||||
if ( arguments.length === 1 ) {
|
||||
return this.options[ key ] === undefined ? null : this.options[ key ];
|
||||
}
|
||||
options[ key ] = value;
|
||||
}
|
||||
}
|
||||
|
||||
this._setOptions( options );
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOptions: function( options ) {
|
||||
var key;
|
||||
|
||||
for ( key in options ) {
|
||||
this._setOption( key, options[ key ] );
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOption: function( key, value ) {
|
||||
this.options[ key ] = value;
|
||||
|
||||
if ( key === "disabled" ) {
|
||||
this.widget()
|
||||
.toggleClass( this.widgetFullName + "-disabled", !!value );
|
||||
|
||||
// If the widget is becoming disabled, then nothing is interactive
|
||||
if ( value ) {
|
||||
this.hoverable.removeClass( "ui-state-hover" );
|
||||
this.focusable.removeClass( "ui-state-focus" );
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
enable: function() {
|
||||
return this._setOptions({ disabled: false });
|
||||
},
|
||||
disable: function() {
|
||||
return this._setOptions({ disabled: true });
|
||||
},
|
||||
|
||||
_on: function( suppressDisabledCheck, element, handlers ) {
|
||||
var delegateElement,
|
||||
instance = this;
|
||||
|
||||
// no suppressDisabledCheck flag, shuffle arguments
|
||||
if ( typeof suppressDisabledCheck !== "boolean" ) {
|
||||
handlers = element;
|
||||
element = suppressDisabledCheck;
|
||||
suppressDisabledCheck = false;
|
||||
}
|
||||
|
||||
// no element argument, shuffle and use this.element
|
||||
if ( !handlers ) {
|
||||
handlers = element;
|
||||
element = this.element;
|
||||
delegateElement = this.widget();
|
||||
} else {
|
||||
element = delegateElement = $( element );
|
||||
this.bindings = this.bindings.add( element );
|
||||
}
|
||||
|
||||
$.each( handlers, function( event, handler ) {
|
||||
function handlerProxy() {
|
||||
// allow widgets to customize the disabled handling
|
||||
// - disabled as an array instead of boolean
|
||||
// - disabled class as method for disabling individual parts
|
||||
if ( !suppressDisabledCheck &&
|
||||
( instance.options.disabled === true ||
|
||||
$( this ).hasClass( "ui-state-disabled" ) ) ) {
|
||||
return;
|
||||
}
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
|
||||
// copy the guid so direct unbinding works
|
||||
if ( typeof handler !== "string" ) {
|
||||
handlerProxy.guid = handler.guid =
|
||||
handler.guid || handlerProxy.guid || $.guid++;
|
||||
}
|
||||
|
||||
var match = event.match( /^([\w:-]*)\s*(.*)$/ ),
|
||||
eventName = match[1] + instance.eventNamespace,
|
||||
selector = match[2];
|
||||
if ( selector ) {
|
||||
delegateElement.delegate( selector, eventName, handlerProxy );
|
||||
} else {
|
||||
element.bind( eventName, handlerProxy );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_off: function( element, eventName ) {
|
||||
eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) +
|
||||
this.eventNamespace;
|
||||
element.unbind( eventName ).undelegate( eventName );
|
||||
|
||||
// Clear the stack to avoid memory leaks (#10056)
|
||||
this.bindings = $( this.bindings.not( element ).get() );
|
||||
this.focusable = $( this.focusable.not( element ).get() );
|
||||
this.hoverable = $( this.hoverable.not( element ).get() );
|
||||
},
|
||||
|
||||
_delay: function( handler, delay ) {
|
||||
function handlerProxy() {
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
var instance = this;
|
||||
return setTimeout( handlerProxy, delay || 0 );
|
||||
},
|
||||
|
||||
_hoverable: function( element ) {
|
||||
this.hoverable = this.hoverable.add( element );
|
||||
this._on( element, {
|
||||
mouseenter: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-hover" );
|
||||
},
|
||||
mouseleave: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-hover" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_focusable: function( element ) {
|
||||
this.focusable = this.focusable.add( element );
|
||||
this._on( element, {
|
||||
focusin: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-focus" );
|
||||
},
|
||||
focusout: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-focus" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_trigger: function( type, event, data ) {
|
||||
var prop, orig,
|
||||
callback = this.options[ type ];
|
||||
|
||||
data = data || {};
|
||||
event = $.Event( event );
|
||||
event.type = ( type === this.widgetEventPrefix ?
|
||||
type :
|
||||
this.widgetEventPrefix + type ).toLowerCase();
|
||||
// the original event may come from any element
|
||||
// so we need to reset the target on the new event
|
||||
event.target = this.element[ 0 ];
|
||||
|
||||
// copy original event properties over to the new event
|
||||
orig = event.originalEvent;
|
||||
if ( orig ) {
|
||||
for ( prop in orig ) {
|
||||
if ( !( prop in event ) ) {
|
||||
event[ prop ] = orig[ prop ];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.element.trigger( event, data );
|
||||
return !( $.isFunction( callback ) &&
|
||||
callback.apply( this.element[0], [ event ].concat( data ) ) === false ||
|
||||
event.isDefaultPrevented() );
|
||||
}
|
||||
};
|
||||
|
||||
$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) {
|
||||
$.Widget.prototype[ "_" + method ] = function( element, options, callback ) {
|
||||
if ( typeof options === "string" ) {
|
||||
options = { effect: options };
|
||||
}
|
||||
var hasOptions,
|
||||
effectName = !options ?
|
||||
method :
|
||||
options === true || typeof options === "number" ?
|
||||
defaultEffect :
|
||||
options.effect || defaultEffect;
|
||||
options = options || {};
|
||||
if ( typeof options === "number" ) {
|
||||
options = { duration: options };
|
||||
}
|
||||
hasOptions = !$.isEmptyObject( options );
|
||||
options.complete = callback;
|
||||
if ( options.delay ) {
|
||||
element.delay( options.delay );
|
||||
}
|
||||
if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) {
|
||||
element[ method ]( options );
|
||||
} else if ( effectName !== method && element[ effectName ] ) {
|
||||
element[ effectName ]( options.duration, options.easing, callback );
|
||||
} else {
|
||||
element.queue(function( next ) {
|
||||
$( this )[ method ]();
|
||||
if ( callback ) {
|
||||
callback.call( element[ 0 ] );
|
||||
}
|
||||
next();
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
var widget = $.widget;
|
||||
|
||||
|
||||
|
||||
}));
|
2
lib/cloudinary/static/js/load-image.all.min.js
vendored
Normal file
2
lib/cloudinary/static/js/load-image.all.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
12
lib/cloudinary/templates/cloudinary_direct_upload.html
Normal file
12
lib/cloudinary/templates/cloudinary_direct_upload.html
Normal file
|
@ -0,0 +1,12 @@
|
|||
{# Plain multipart form posting a file directly to the Cloudinary API #}
{# endpoint, carrying the signed upload parameters as hidden inputs.  #}
<form action="{{ url }}" method="POST" enctype="multipart/form-data">
  {% for name, value in params.items %}
  <input type="hidden" name="{{ name }}" value="{{ value }}"/>
  {% endfor %}
  {% block extra %} {% endblock %}
  {% block file %}
  <input type="file" name="file"/>
  {% endblock %}
  {% block submit %}
  <input type="submit"/>
  {% endblock %}
</form>
|
14
lib/cloudinary/templates/cloudinary_includes.html
Normal file
14
lib/cloudinary/templates/cloudinary_includes.html
Normal file
|
@ -0,0 +1,14 @@
|
|||
{% load staticfiles %}
|
||||
|
||||
<script src="{% static "js/jquery.ui.widget.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.iframe-transport.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.cloudinary.js" %}" type="text/javascript"></script>
|
||||
|
||||
{% if processing %}
|
||||
<script src="{% static "js/load-image.all.min.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/canvas-to-blob.min.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-process.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-image.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-validate.js" %}" type="text/javascript"></script>
|
||||
{% endif %}
|
3
lib/cloudinary/templates/cloudinary_js_config.html
Normal file
3
lib/cloudinary/templates/cloudinary_js_config.html
Normal file
|
@ -0,0 +1,3 @@
|
|||
<script type='text/javascript'>
|
||||
$.cloudinary.config({{ params|safe }});
|
||||
</script>
|
1
lib/cloudinary/templatetags/__init__.py
Normal file
1
lib/cloudinary/templatetags/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
#
|
85
lib/cloudinary/templatetags/cloudinary.py
Normal file
85
lib/cloudinary/templatetags/cloudinary.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
|
||||
from django import template
|
||||
from django.forms import Form
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import CloudinaryResource, utils, uploader
|
||||
from cloudinary.forms import CloudinaryJsFileField, cl_init_js_callbacks
|
||||
from cloudinary.compat import PY3
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
def cloudinary_url(context, source, options_dict=None, **options):
    """Template tag returning the Cloudinary delivery URL for ``source``.

    Keyword options override entries from ``options_dict``.  When the
    current request is HTTPS and the caller did not specify ``secure``,
    a secure URL is generated.
    """
    merged = {} if options_dict is None else dict(options_dict)
    merged.update(options)
    try:
        request = context['request']
    except KeyError:
        request = None
    # Default to secure delivery on HTTPS pages unless the caller decided.
    if request is not None and request.is_secure() and 'secure' not in merged:
        merged['secure'] = True
    if not isinstance(source, CloudinaryResource):
        source = CloudinaryResource(source)
    return source.build_url(**merged)
|
||||
|
||||
|
||||
@register.simple_tag(name='cloudinary', takes_context=True)
def cloudinary_tag(context, image, options_dict=None, **options):
    """Template tag rendering an ``<img>`` element for ``image``.

    Keyword options override entries from ``options_dict``.  When the
    current request is HTTPS and the caller did not specify ``secure``,
    a secure URL is used for the image source.
    """
    merged = {} if options_dict is None else dict(options_dict)
    merged.update(options)
    try:
        request = context['request']
    except KeyError:
        request = None
    # Default to secure delivery on HTTPS pages unless the caller decided.
    if request is not None and request.is_secure() and 'secure' not in merged:
        merged['secure'] = True
    if not isinstance(image, CloudinaryResource):
        image = CloudinaryResource(image)
    return mark_safe(image.image(**merged))
|
||||
|
||||
|
||||
@register.simple_tag
def cloudinary_direct_upload_field(field_name="image", request=None):
    """Render a Cloudinary direct-upload form field named ``field_name``.

    When ``request`` is given, the JS upload callbacks are initialized
    for the generated form.
    """
    # Build a throwaway Form subclass holding the single upload field.
    form_cls = type("OnTheFlyForm", (Form,), {field_name: CloudinaryJsFileField()})
    form = form_cls()
    if request:
        cl_init_js_callbacks(form, request)
    bound_field = form[field_name]
    # On Python 2, force rendering to a unicode string.
    return bound_field if PY3 else unicode(bound_field)
|
||||
|
||||
|
||||
"""Deprecated - please use cloudinary_direct_upload_field, or a proper form"""
|
||||
@register.inclusion_tag('cloudinary_direct_upload.html')
|
||||
def cloudinary_direct_upload(callback_url, **options):
|
||||
params = utils.build_upload_params(callback=callback_url, **options)
|
||||
params = utils.sign_request(params, options)
|
||||
|
||||
api_url = utils.cloudinary_api_url("upload", resource_type=options.get("resource_type", "image"),
|
||||
upload_prefix=options.get("upload_prefix"))
|
||||
|
||||
return {"params": params, "url": api_url}
|
||||
|
||||
|
||||
@register.inclusion_tag('cloudinary_includes.html')
def cloudinary_includes(processing=False):
    """Render the script includes for the Cloudinary JS helpers."""
    template_context = {"processing": processing}
    return template_context
|
||||
|
||||
|
||||
# Configuration keys forwarded to the browser-side cloudinary.js.
CLOUDINARY_JS_CONFIG_PARAMS = ("api_key", "cloud_name", "private_cdn", "secure_distribution", "cdn_subdomain")


@register.inclusion_tag('cloudinary_js_config.html')
def cloudinary_js_config():
    """Render a JSON blob of the non-empty JS-relevant config values."""
    config = cloudinary.config()
    return dict(
        params=json.dumps(dict(
            (param, getattr(config, param)) for param in CLOUDINARY_JS_CONFIG_PARAMS if getattr(config, param, None)
        ))
    )
|
325
lib/cloudinary/uploader.py
Normal file
325
lib/cloudinary/uploader.py
Normal file
|
@ -0,0 +1,325 @@
|
|||
# Copyright Cloudinary
|
||||
import json
|
||||
import re
|
||||
import socket
|
||||
from os.path import getsize
|
||||
|
||||
import cloudinary
|
||||
import urllib3
|
||||
import certifi
|
||||
from cloudinary import utils
|
||||
from cloudinary.api import Error
|
||||
from cloudinary.compat import string_types
|
||||
from urllib3.exceptions import HTTPError
|
||||
from urllib3 import PoolManager
|
||||
|
||||
try:
    # urllib3's AppEngine shim; importing it can fail outside GAE builds.
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
except Exception:
    def is_appengine_sandbox():
        # Fallback when the AppEngine contrib module is unavailable.
        return False

try:  # Python 2.7+
    from collections import OrderedDict
except ImportError:
    from urllib3.packages.ordered_dict import OrderedDict

# Module-level HTTP client shared by all upload calls.
if is_appengine_sandbox():
    # AppEngineManager uses AppEngine's URLFetch API behind the scenes
    _http = AppEngineManager()
else:
    # PoolManager uses a socket-level API behind the scenes
    _http = PoolManager(
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where()
    )
|
||||
|
||||
|
||||
def upload(file, **options):
    """Upload *file* (path, URL, stream or raw data) and return the API response."""
    upload_params = utils.build_upload_params(**options)
    return call_api("upload", upload_params, file=file, **options)


def unsigned_upload(file, upload_preset, **options):
    """Upload without a signature, authorized by an unsigned *upload_preset*."""
    return upload(file, unsigned=True, upload_preset=upload_preset, **options)
|
||||
|
||||
|
||||
def upload_image(file, **options):
    """Upload *file* and wrap the API response in a CloudinaryImage."""
    response = upload(file, **options)
    return cloudinary.CloudinaryImage(
        response["public_id"],
        version=str(response["version"]),
        format=response.get("format"),
        metadata=response,
    )
|
||||
|
||||
|
||||
def upload_resource(file, **options):
    """Upload *file* and wrap the API response in a CloudinaryResource."""
    response = upload(file, **options)
    return cloudinary.CloudinaryResource(
        response["public_id"],
        version=str(response["version"]),
        format=response.get("format"),
        type=response["type"],
        resource_type=response["resource_type"],
        metadata=response,
    )
|
||||
|
||||
|
||||
def upload_large(file, **options):
    """ Upload large files. """
    # One shared id ties every chunk request into a single assembled upload.
    upload_id = utils.random_public_id()
    with open(file, 'rb') as file_io:
        results = None
        current_loc = 0
        chunk_size = options.get("chunk_size", 20000000)  # default: 20 MB per chunk
        file_size = getsize(file)
        chunk = file_io.read(chunk_size)
        while chunk:
            # Content-Range tells the server where this chunk belongs in the whole file.
            range = "bytes {0}-{1}/{2}".format(current_loc, current_loc + len(chunk) - 1, file_size)
            current_loc += len(chunk)

            results = upload_large_part((file, chunk),
                                        http_headers={"Content-Range": range, "X-Unique-Upload-Id": upload_id},
                                        **options)
            # Reuse the server-assigned public_id for all subsequent chunks.
            options["public_id"] = results.get("public_id")
            chunk = file_io.read(chunk_size)
        # Response of the final chunk (contains the assembled resource details).
        return results
|
||||
|
||||
|
||||
def upload_large_part(file, **options):
    """Upload one chunk of a large upload (defaults to the raw resource type)."""
    chunk_params = utils.build_upload_params(**options)
    options.setdefault('resource_type', "raw")
    return call_api("upload", chunk_params, file=file, **options)
|
||||
|
||||
|
||||
def destroy(public_id, **options):
    """Delete the resource identified by *public_id*."""
    destroy_params = {
        "timestamp": utils.now(),
        "type": options.get("type"),
        "invalidate": options.get("invalidate"),
        "public_id": public_id,
    }
    return call_api("destroy", destroy_params, **options)
|
||||
|
||||
|
||||
def rename(from_public_id, to_public_id, **options):
    """Rename a resource from one public id to another."""
    rename_params = {
        "timestamp": utils.now(),
        "type": options.get("type"),
        "overwrite": options.get("overwrite"),
        "invalidate": options.get("invalidate"),
        "from_public_id": from_public_id,
        "to_public_id": to_public_id,
    }
    return call_api("rename", rename_params, **options)
|
||||
|
||||
|
||||
def explicit(public_id, **options):
    """Apply upload-time actions to the already-uploaded *public_id*."""
    explicit_params = utils.build_upload_params(**options)
    explicit_params["public_id"] = public_id
    return call_api("explicit", explicit_params, **options)
|
||||
|
||||
|
||||
def create_archive(**options):
    """Create an archive of resources and upload it."""
    archive_params = utils.archive_params(**options)
    target_format = options.get("target_format")
    if target_format is not None:
        archive_params["target_format"] = target_format
    return call_api("generate_archive", archive_params, **options)


def create_zip(**options):
    """Shortcut for create_archive() with a zip target format."""
    return create_archive(target_format="zip", **options)
|
||||
|
||||
|
||||
def generate_sprite(tag, **options):
    """Generate a sprite image from all resources sharing *tag*."""
    transformation = utils.generate_transformation_string(fetch_format=options.get("format"), **options)[0]
    sprite_params = {
        "timestamp": utils.now(),
        "tag": tag,
        "async": options.get("async"),
        "notification_url": options.get("notification_url"),
        "transformation": transformation,
    }
    return call_api("sprite", sprite_params, **options)
|
||||
|
||||
|
||||
def multi(tag, **options):
    """Create a single multi-page/animated file from all resources with *tag*."""
    multi_params = {
        "timestamp": utils.now(),
        "tag": tag,
        "format": options.get("format"),
        "async": options.get("async"),
        "notification_url": options.get("notification_url"),
        "transformation": utils.generate_transformation_string(**options)[0],
    }
    return call_api("multi", multi_params, **options)
|
||||
|
||||
|
||||
def explode(public_id, **options):
    """Create derived images for every page of a multi-page resource."""
    explode_params = {
        "timestamp": utils.now(),
        "public_id": public_id,
        "format": options.get("format"),
        "notification_url": options.get("notification_url"),
        "transformation": utils.generate_transformation_string(**options)[0],
    }
    return call_api("explode", explode_params, **options)
|
||||
|
||||
|
||||
def add_tag(tag, public_ids=None, **options):
    """Add *tag* to the given resources.

    Passing ``exclusive=True`` also clears the tag from all other resources.
    """
    if options.pop("exclusive", None):
        command = "set_exclusive"
    else:
        command = "add"
    return call_tags_api(tag, command, public_ids, **options)
|
||||
|
||||
|
||||
def remove_tag(tag, public_ids=None, **options):
    """Remove *tag* from the given resources."""
    return call_tags_api(tag, "remove", public_ids, **options)


def replace_tag(tag, public_ids=None, **options):
    """Replace all tags on the given resources with *tag*."""
    return call_tags_api(tag, "replace", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_tags(public_ids, **options):
    """Remove every tag from the specified public IDs.

    :param public_ids: the public IDs of the resources to update
    :param options: additional options passed to the request
    :return: dictionary with a list of public IDs that were updated
    """
    return call_tags_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def add_context(context, public_ids, **options):
    """Add context keys/values to resources; existing keys are updated.

    :param context: dictionary of context
    :param public_ids: the public IDs of the resources to update
    :param options: additional options passed to the request
    :return: dictionary with a list of public IDs that were updated
    """
    return call_context_api(context, "add", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_context(public_ids, **options):
    """Remove every custom context entry from the specified public IDs.

    :param public_ids: the public IDs of the resources to update
    :param options: additional options passed to the request
    :return: dictionary with a list of public IDs that were updated
    """
    return call_context_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def call_tags_api(tag, command, public_ids=None, **options):
    """Issue a tags management request: run *command* for *tag* on *public_ids*."""
    tag_params = {
        "timestamp": utils.now(),
        "tag": tag,
        "public_ids": utils.build_array(public_ids),
        "command": command,
        "type": options.get("type"),
    }
    return call_api("tags", tag_params, **options)
|
||||
|
||||
|
||||
def call_context_api(context, command, public_ids=None, **options):
    """Issue a context management request: run *command* on *public_ids*."""
    context_params = {
        "timestamp": utils.now(),
        "context": utils.encode_context(context),
        "public_ids": utils.build_array(public_ids),
        "command": command,
        "type": options.get("type"),
    }
    return call_api("context", context_params, **options)
|
||||
|
||||
|
||||
# Optional styling parameters accepted by the text-image endpoint; any of
# these present in the caller's options are forwarded verbatim by text().
TEXT_PARAMS = ["public_id",
               "font_family",
               "font_size",
               "font_color",
               "text_align",
               "font_weight",
               "font_style",
               "background",
               "opacity",
               "text_decoration"
               ]
|
||||
|
||||
|
||||
def text(text, **options):
    """Create an image from *text*, styled by any TEXT_PARAMS options."""
    text_params = {"timestamp": utils.now(), "text": text}
    text_params.update((key, options.get(key)) for key in TEXT_PARAMS)
    return call_api("text", text_params, **options)
|
||||
|
||||
|
||||
def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, **options):
    """POST *params* (and optionally *file*) to the Cloudinary *action* endpoint.

    :param action: API action name, e.g. "upload"
    :param params: request parameters; signed unless *unsigned* is True
    :param http_headers: extra HTTP headers to send
    :param return_error: when True, return API error responses instead of raising
    :param unsigned: skip request signing (unsigned upload presets)
    :param file: file path, URL/data URI, stream, (name, data) tuple or raw data
    :param timeout: optional request timeout in seconds
    :return: decoded JSON response as a dict
    :raises Error: on transport failures, unparsable responses, or API errors
    """
    if http_headers is None:
        http_headers = {}
    if unsigned:
        params = utils.cleanup_params(params)
    else:
        params = utils.sign_request(params, options)

    param_list = OrderedDict()
    for k, v in params.items():
        if isinstance(v, list):
            # Encode list values as k[0], k[1], ... form fields.
            for i in range(len(v)):
                param_list["{0}[{1}]".format(k, i)] = v[i]
        elif v:
            param_list[k] = v

    api_url = utils.cloudinary_api_url(action, **options)
    if file:
        if isinstance(file, string_types):
            if re.match(r'ftp:|https?:|s3:|data:[^;]*;base64,([a-zA-Z0-9\/+\n=]+)$', file):
                # Remote URL or base64 data URI: pass through as-is.
                name = None
                data = file
            else:
                # Local file path: read its contents.
                name = file
                with open(file, "rb") as opened:
                    data = opened.read()
        elif hasattr(file, 'read') and callable(file.read):
            # File-like stream.
            data = file.read()
            name = file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream"
        elif isinstance(file, tuple):
            # Pre-built (name, data) pair, e.g. a chunk from upload_large.
            name = None
            data = file
        else:
            # Not a string, not a stream: treat as raw data.
            name = "file"
            data = file

        param_list["file"] = (name, data) if name else data

    headers = {"User-Agent": cloudinary.get_user_agent()}
    headers.update(http_headers)

    kw = {}
    if timeout is not None:
        kw['timeout'] = timeout

    code = 200
    try:
        response = _http.request("POST", api_url, param_list, headers, **kw)
    except HTTPError as e:
        raise Error("Unexpected error - {0!r}".format(e))
    except socket.error as e:
        raise Error("Socket error: {0!r}".format(e))

    try:
        result = json.loads(response.data.decode('utf-8'))
    except Exception as e:
        # Bug fix: the %-style arguments were previously passed to Error()
        # unapplied, so callers saw the raw format string instead of the
        # status code and cause.  Apply the formatting explicitly.
        raise Error("Error parsing server response (%d) - %s. Got - %s" % (response.status, response, e))

    if "error" in result:
        # Known API statuses keep the default 200 marker; anything else is
        # reported with its real HTTP status code.
        if response.status not in [200, 400, 401, 403, 404, 500]:
            code = response.status
        if return_error:
            result["error"]["http_code"] = code
        else:
            raise Error(result["error"]["message"])

    return result
|
912
lib/cloudinary/utils.py
Normal file
912
lib/cloudinary/utils.py
Normal file
|
@ -0,0 +1,912 @@
|
|||
# Copyright Cloudinary
|
||||
import base64
|
||||
import copy
|
||||
import hashlib
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import struct
|
||||
import time
|
||||
import zlib
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime, date
|
||||
from fractions import Fraction
|
||||
|
||||
import six.moves.urllib.parse
|
||||
from six import iteritems
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import auth_token
|
||||
from cloudinary.compat import PY3, to_bytes, to_bytearray, to_string, string_types, urlparse
|
||||
|
||||
# Matches user-defined variable references like "$(name)" inside expressions.
VAR_NAME_RE = r'(\$\([a-zA-Z]\w+\))'

urlencode = six.moves.urllib.parse.urlencode
unquote = six.moves.urllib.parse.unquote

""" @deprecated: use cloudinary.SHARED_CDN """
SHARED_CDN = "res.cloudinary.com"

# Transformation applied when responsive_width is requested but no custom
# responsive transformation is configured.
DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION = {"width": "auto", "crop": "limit"}

# A single offset value: a number with an optional percent modifier (% / p / P).
RANGE_VALUE_RE = r'^(?P<value>(\d+\.)?\d+)(?P<modifier>[%pP])?$'
# A "start..end" offset range, each side matching RANGE_VALUE_RE.
RANGE_RE = r'^(\d+\.)?\d+[%pP]?\.\.(\d+\.)?\d+[%pP]?$'
# A decimal number with a mandatory dot, e.g. "0.5" (used by is_fraction).
FLOAT_RE = r'^(\d+)\.(\d+)?$'
# Layer text-style options paired with the default value omitted from URLs.
__LAYER_KEYWORD_PARAMS = [("font_weight", "normal"),
                          ("font_style", "normal"),
                          ("text_decoration", "none"),
                          ("text_align", None),
                          ("stroke", "none")]
|
||||
|
||||
|
||||
def build_array(arg):
    """Normalize *arg* to a list: lists pass through unchanged, None becomes
    an empty list, and any other value is wrapped in a one-element list."""
    if isinstance(arg, list):
        return arg
    if arg is None:
        return []
    return [arg]
|
||||
|
||||
|
||||
def build_list_of_dicts(val):
    """Coerce *val* into a list of dicts.

    Accepts a dict, a list of dicts, a JSON-decodable string, or a list of
    JSON-decodable strings; a top-level non-list value is wrapped in a list.
    JSON objects are decoded with OrderedDict to preserve key order.

    :param val: Input value
    :type val: Union[list, dict, str]

    :return: list of dicts
    :raises: ValueError when any item cannot be presented as a dict
    """
    if val is None:
        return []

    def _decode(candidate):
        # Decode JSON strings, preserving key order; leave others untouched.
        return json.loads(candidate, object_pairs_hook=OrderedDict) if isinstance(candidate, str) else candidate

    val = _decode(val)
    if isinstance(val, dict):
        val = [val]

    for index, item in enumerate(val):
        if isinstance(item, str):
            val[index] = _decode(item)
        if not isinstance(val[index], dict):
            raise ValueError("Expected a list of dicts")
    return val
|
||||
|
||||
|
||||
def encode_double_array(array):
    """Encode a nested array as "a,b|c,d"; a flat array becomes "a,b"."""
    array = build_array(array)
    if array and isinstance(array[0], list):
        return "|".join(",".join(str(item) for item in build_array(inner)) for inner in array)
    return ",".join(str(item) for item in array)
|
||||
|
||||
|
||||
def encode_dict(arg):
    """Encode a dict as "k1=v1|k2=v2"; non-dict values pass through unchanged.

    Consistency: uses six.iteritems (already imported at module level) for the
    PY2/PY3 split, matching encode_context, instead of branching on the PY3
    flag with a direct iteritems() attribute call.
    """
    if isinstance(arg, dict):
        return "|".join((k + "=" + v) for k, v in iteritems(arg))
    else:
        return arg
|
||||
|
||||
|
||||
def encode_context(context):
    """
    Encode a context dict as "k1=v1|k2=v2", escaping "=" and "|" in values.

    :param context: dict of context to be encoded
    :return: pipe-joined key=value string; non-dict input is returned unchanged
    """
    if not isinstance(context, dict):
        return context

    def escape(value):
        # Escape the separators so they survive round-tripping.
        return value.replace("=", "\\=").replace("|", "\\|")

    return "|".join("{}={}".format(key, escape(value)) for key, value in iteritems(context))
|
||||
|
||||
|
||||
def json_encode(value):
    """
    Converts value to a json encoded string

    :param value: value to be encoded

    :return: JSON encoded string
    """
    # __json_serializer handles non-JSON types (see its definition in this
    # module); compact separators keep the encoded value URL-friendly.
    return json.dumps(value, default=__json_serializer, separators=(',', ':'))
|
||||
|
||||
|
||||
def generate_transformation_string(**options):
    """Build the transformation component of a delivery URL.

    Returns a (transformation_string, remaining_options) pair.  Options that
    are consumed here are popped, so the caller can forward whatever remains
    (e.g. as HTML attributes).
    """
    responsive_width = options.pop("responsive_width", cloudinary.config().responsive_width)
    size = options.pop("size", None)
    if size:
        # "WxH" shorthand expands into explicit width/height options.
        options["width"], options["height"] = size.split("x")
    width = options.get("width")
    height = options.get("height")
    has_layer = ("underlay" in options) or ("overlay" in options)

    crop = options.pop("crop", None)
    angle = ".".join([str(value) for value in build_array(options.pop("angle", None))])
    # In these cases the HTML width/height attributes would not match the
    # delivered image, so they must be stripped from the returned options.
    no_html_sizes = has_layer or angle or crop == "fit" or crop == "limit" or responsive_width

    if width and (str(width).startswith("auto") or str(width) == "ow" or is_fraction(width) or no_html_sizes):
        del options["width"]
    if height and (str(height) == "oh" or is_fraction(height) or no_html_sizes):
        del options["height"]

    # "#rrggbb" colors are written as "rgb:rrggbb" in URLs.
    background = options.pop("background", None)
    if background:
        background = background.replace("#", "rgb:")
    color = options.pop("color", None)
    if color:
        color = color.replace("#", "rgb:")

    base_transformations = build_array(options.pop("transformation", None))
    if any(isinstance(bs, dict) for bs in base_transformations):
        # Chained transformations: serialize each dict / name recursively.
        def recurse(bs):
            if isinstance(bs, dict):
                return generate_transformation_string(**bs)[0]
            else:
                return generate_transformation_string(transformation=bs)[0]
        base_transformations = list(map(recurse, base_transformations))
        named_transformation = None
    else:
        # Only names given: join them as a single named transformation (t_...).
        named_transformation = ".".join(base_transformations)
        base_transformations = []

    effect = options.pop("effect", None)
    if isinstance(effect, list):
        effect = ":".join([str(x) for x in effect])
    elif isinstance(effect, dict):
        # A one-entry dict {"name": arg} becomes "name:arg".
        effect = ":".join([str(x) for x in list(effect.items())[0]])

    border = options.pop("border", None)
    if isinstance(border, dict):
        border_color = border.get("color", "black").replace("#", "rgb:")
        border = "%(width)spx_solid_%(color)s" % {"color": border_color,
                                                  "width": str(border.get("width", 2))}

    flags = ".".join(build_array(options.pop("flags", None)))
    dpr = options.pop("dpr", cloudinary.config().dpr)
    duration = norm_range_value(options.pop("duration", None))
    start_offset = norm_range_value(options.pop("start_offset", None))
    end_offset = norm_range_value(options.pop("end_offset", None))
    offset = split_range(options.pop("offset", None))
    if offset:
        # "offset" is shorthand for start_offset..end_offset.
        start_offset = norm_range_value(offset[0])
        end_offset = norm_range_value(offset[1])

    video_codec = process_video_codec_param(options.pop("video_codec", None))

    aspect_ratio = options.pop("aspect_ratio", None)
    if isinstance(aspect_ratio, Fraction):
        aspect_ratio = str(aspect_ratio.numerator) + ":" + str(aspect_ratio.denominator)

    overlay = process_layer(options.pop("overlay", None), "overlay")
    underlay = process_layer(options.pop("underlay", None), "underlay")
    if_value = process_conditional(options.pop("if", None))

    # Map each option to its one/two-letter URL parameter.
    params = {
        "a": normalize_expression(angle),
        "ar": normalize_expression(aspect_ratio),
        "b": background,
        "bo": border,
        "c": crop,
        "co": color,
        "dpr": normalize_expression(dpr),
        "du": normalize_expression(duration),
        "e": normalize_expression(effect),
        "eo": normalize_expression(end_offset),
        "fl": flags,
        "h": normalize_expression(height),
        "l": overlay,
        "o": normalize_expression(options.pop('opacity',None)),
        "q": normalize_expression(options.pop('quality',None)),
        "r": normalize_expression(options.pop('radius',None)),
        "so": normalize_expression(start_offset),
        "t": named_transformation,
        "u": underlay,
        "w": normalize_expression(width),
        "x": normalize_expression(options.pop('x',None)),
        "y": normalize_expression(options.pop('y',None)),
        "vc": video_codec,
        "z": normalize_expression(options.pop('zoom',None))
    }
    # Options copied verbatim (no expression normalization needed).
    simple_params = {
        "ac": "audio_codec",
        "af": "audio_frequency",
        "br": "bit_rate",
        "cs": "color_space",
        "d": "default_image",
        "dl": "delay",
        "dn": "density",
        "f": "fetch_format",
        "g": "gravity",
        "ki": "keyframe_interval",
        "p": "prefix",
        "pg": "page",
        "sp": "streaming_profile",
        "vs": "video_sampling",
    }

    for param, option in simple_params.items():
        params[param] = options.pop(option, None)

    variables = options.pop('variables',{})
    var_params = []
    for key,value in options.items():
        # Keys starting with "$" are user-defined variables.
        if re.match(r'^\$', key):
            var_params.append(u"{0}_{1}".format(key, normalize_expression(str(value))))

    var_params.sort()

    if variables:
        for var in variables:
            var_params.append(u"{0}_{1}".format(var[0], normalize_expression(str(var[1]))))

    variables = ','.join(var_params)

    # Keep zeros but drop None/empty values; sort for a stable URL.
    sorted_params = sorted([param + "_" + str(value) for param, value in params.items() if (value or value == 0)])
    if variables:
        # Variable definitions must precede the parameters that use them.
        sorted_params.insert(0, str(variables))

    if if_value is not None:
        # Conditional transformations always come first.
        sorted_params.insert(0, "if_" + str(if_value))
    transformation = ",".join(sorted_params)
    if "raw_transformation" in options:
        transformation = transformation + "," + options.pop("raw_transformation")
    transformations = base_transformations + [transformation]
    if responsive_width:
        # Append the configured (or default) responsive-width transformation.
        responsive_width_transformation = cloudinary.config().responsive_width_transformation \
            or DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION
        transformations += [generate_transformation_string(**responsive_width_transformation)[0]]
    url = "/".join([trans for trans in transformations if trans])

    # Hints for the HTML tag builders in the remaining options.
    if str(width).startswith("auto") or responsive_width:
        options["responsive"] = True
    if dpr == "auto":
        options["hidpi"] = True
    return url, options
|
||||
|
||||
|
||||
def is_fraction(width):
    """Truthy when *width* looks like a decimal fraction strictly below 1."""
    text = str(width)
    return re.match(FLOAT_RE, text) and float(text) < 1
|
||||
|
||||
|
||||
def split_range(range):
    """Split a range given as "start..end" or as a 2+-element sequence into
    [start, end]; any other input yields None."""
    if isinstance(range, (list, tuple)) and len(range) >= 2:
        return [range[0], range[-1]]
    if isinstance(range, string_types) and re.match(RANGE_RE, range):
        return range.split("..", 1)
    return None
|
||||
|
||||
|
||||
def norm_range_value(value):
    """Normalize an offset value: plain numbers pass through as strings, a
    percent modifier (% / p / P) becomes "p"; None or unparsable -> None."""
    if value is None:
        return None

    match = re.match(RANGE_VALUE_RE, str(value))
    if match is None:
        return None

    suffix = 'p' if match.group('modifier') is not None else ''
    return match.group('value') + suffix
|
||||
|
||||
|
||||
def process_video_codec_param(param):
    """Serialize a video codec option.

    Dicts like {"codec": c, "profile": p, "level": l} become "c:p:l"
    (level only when a profile is present); other values pass through.
    """
    if not isinstance(param, dict):
        return param
    parts = [param['codec']]
    if 'profile' in param:
        parts.append(param['profile'])
        if 'level' in param:
            parts.append(param['level'])
    return ':'.join(parts)
|
||||
|
||||
|
||||
def cleanup_params(params):
    """Drop None/empty-string entries and coerce the rest via __safe_value."""
    return {key: __safe_value(value)
            for (key, value) in params.items()
            if value is not None and not value == ""}
|
||||
|
||||
|
||||
def sign_request(params, options):
    """Clean *params*, then add the API key and a SHA1 signature.

    :raises ValueError: when api_key or api_secret cannot be resolved.
    """
    api_key = options.get("api_key", cloudinary.config().api_key)
    if not api_key:
        raise ValueError("Must supply api_key")
    api_secret = options.get("api_secret", cloudinary.config().api_secret)
    if not api_secret:
        raise ValueError("Must supply api_secret")

    signed = cleanup_params(params)
    signed["signature"] = api_sign_request(signed, api_secret)
    signed["api_key"] = api_key

    return signed
|
||||
|
||||
|
||||
def api_sign_request(params_to_sign, api_secret):
    """SHA1-hex signature over the sorted "k=v" pairs joined by "&" plus the secret."""
    pairs = [k + "=" + (",".join(v) if isinstance(v, list) else str(v))
             for k, v in params_to_sign.items() if v]
    payload = "&".join(sorted(pairs)) + api_secret
    return hashlib.sha1(to_bytes(payload)).hexdigest()
|
||||
|
||||
|
||||
def breakpoint_settings_mapper(breakpoint_settings):
    """Return a deep copy of *breakpoint_settings* with its "transformation"
    dict (when present) serialized to a transformation string."""
    settings = copy.deepcopy(breakpoint_settings)
    transformation = settings.get("transformation")
    if transformation is not None:
        settings["transformation"], _ = generate_transformation_string(**transformation)
    return settings
|
||||
|
||||
|
||||
def generate_responsive_breakpoints_string(breakpoints):
    """JSON-encode breakpoint settings (a single dict or a list); None -> None."""
    if breakpoints is None:
        return None
    return json.dumps([breakpoint_settings_mapper(bp) for bp in build_array(breakpoints)])
|
||||
|
||||
|
||||
def finalize_source(source, format, url_suffix):
    """Escape *source* and append the optional url_suffix and format.

    Returns (source, source_to_sign); the signed variant excludes the SEO
    suffix so signatures do not depend on it.
    """
    # Collapse duplicate slashes, but keep the "//" of a scheme intact.
    source = re.sub(r'([^:])/+', r'\1/', source)
    if re.match(r'^https?:/', source):
        source = smart_escape(source)
        source_to_sign = source
    else:
        # Public id: decode first so already-encoded input is not double-escaped.
        source = unquote(source)
        if not PY3: source = source.encode('utf8')
        source = smart_escape(source)
        source_to_sign = source
    if url_suffix is not None:
        if re.search(r'[\./]', url_suffix): raise ValueError("url_suffix should not include . or /")
        source = source + "/" + url_suffix
    if format is not None:
        source = source + "." + format
        source_to_sign = source_to_sign + "." + format

    return source, source_to_sign
|
||||
|
||||
|
||||
def finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten):
    """Apply URL-shape options (SEO suffix, root path, shortening) and return
    the adjusted (resource_type, type) pair.

    :raises ValueError: for combinations the CDN does not support.
    """
    upload_type = type or "upload"

    if url_suffix is not None:
        # SEO suffixes only exist for image/upload and raw/upload.
        if resource_type == "image" and upload_type == "upload":
            resource_type, upload_type = "images", None
        elif resource_type == "raw" and upload_type == "upload":
            resource_type, upload_type = "files", None
        else:
            raise ValueError("URL Suffix only supported for image/upload and raw/upload")

    if use_root_path:
        root_ok = (resource_type == "image" and upload_type == "upload") \
            or (resource_type == "images" and upload_type is None)
        if not root_ok:
            raise ValueError("Root path only supported for image/upload")
        resource_type, upload_type = None, None

    if shorten and resource_type == "image" and upload_type == "upload":
        resource_type, upload_type = "iu", None

    return resource_type, upload_type
|
||||
|
||||
|
||||
def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname, secure,
                                 secure_distribution):
    """cdn_subdomain and secure_cdn_subdomain
    1) Customers in shared distribution (e.g. res.cloudinary.com)
    if cdn_domain is true uses res-[1-5].cloudinary.com for both http and https. Setting secure_cdn_subdomain to false disables this for https.
    2) Customers with private cdn
    if cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for http
    if secure_cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for https (please contact support if you require this)
    3) Customers with cname
    if cdn_domain is true uses a[1-5].cname for http. For https, uses the same naming scheme as 1 for shared distribution and as 2 for private distribution."""
    shared_domain = not private_cdn
    # Shard index derived from the source, so a given asset always maps to
    # the same subdomain (stable browser connection reuse).
    shard = __crc(source)
    if secure:
        if secure_distribution is None or secure_distribution == cloudinary.OLD_AKAMAI_SHARED_CDN:
            secure_distribution = cloud_name + "-res.cloudinary.com" if private_cdn else cloudinary.SHARED_CDN

        shared_domain = shared_domain or secure_distribution == cloudinary.SHARED_CDN
        if secure_cdn_subdomain is None and shared_domain:
            # On shared distributions, https sharding defaults to the http setting.
            secure_cdn_subdomain = cdn_subdomain

        if secure_cdn_subdomain:
            secure_distribution = re.sub('res.cloudinary.com', "res-" + shard + ".cloudinary.com", secure_distribution)

        prefix = "https://" + secure_distribution
    elif cname:
        subdomain = "a" + shard + "." if cdn_subdomain else ""
        prefix = "http://" + subdomain + cname
    else:
        subdomain = cloud_name + "-res" if private_cdn else "res"
        if cdn_subdomain: subdomain = subdomain + "-" + shard
        prefix = "http://" + subdomain + ".cloudinary.com"

    # Shared distributions carry the cloud name as the first path component.
    if shared_domain: prefix += "/" + cloud_name

    return prefix
|
||||
|
||||
|
||||
def merge(*dict_args):
    """Shallow-merge dicts left to right, skipping None arguments.

    Later dicts override earlier keys; the inputs are never mutated.
    Returns None when every argument is None (or none are given).
    """
    merged = None
    for mapping in dict_args:
        if mapping is None:
            continue
        if merged is None:
            merged = mapping.copy()
        else:
            merged.update(mapping)
    return merged
|
||||
|
||||
|
||||
def cloudinary_url(source, **options):
    """Build a full delivery URL for *source*.

    Returns (url, remaining_options); options consumed here are popped so the
    caller can forward whatever remains (e.g. as HTML attributes).
    """
    original_source = source

    type = options.pop("type", "upload")
    if type == 'fetch':
        # Fetched remote assets keep their own format; "format" becomes the
        # requested fetch_format conversion instead.
        options["fetch_format"] = options.get("fetch_format", options.pop("format", None))
    transformation, options = generate_transformation_string(**options)

    resource_type = options.pop("resource_type", "image")
    version = options.pop("version", None)
    format = options.pop("format", None)
    cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain)
    secure_cdn_subdomain = options.pop("secure_cdn_subdomain", cloudinary.config().secure_cdn_subdomain)
    cname = options.pop("cname", cloudinary.config().cname)
    shorten = options.pop("shorten", cloudinary.config().shorten)

    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name or None)
    if cloud_name is None:
        raise ValueError("Must supply cloud_name in tag or in configuration")
    secure = options.pop("secure", cloudinary.config().secure)
    private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn)
    secure_distribution = options.pop("secure_distribution", cloudinary.config().secure_distribution)
    sign_url = options.pop("sign_url", cloudinary.config().sign_url)
    api_secret = options.pop("api_secret", cloudinary.config().api_secret)
    url_suffix = options.pop("url_suffix", None)
    use_root_path = options.pop("use_root_path", cloudinary.config().use_root_path)
    auth_token = options.pop("auth_token", None)
    if auth_token is not False:
        # Merge call-level token settings over the configured defaults.
        auth_token = merge(cloudinary.config().auth_token, auth_token)

    # http(s) sources of type "upload" are already URLs; return them untouched.
    if (not source) or type == "upload" and re.match(r'^https?:', source):
        return original_source, options

    resource_type, type = finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten)
    source, source_to_sign = finalize_source(source, format, url_suffix)

    # Public ids containing "/" look like folder paths; force a version
    # unless one is already embedded or explicitly supplied.
    if source_to_sign.find("/") >= 0 \
            and not re.match(r'^https?:/', source_to_sign) \
            and not re.match(r'^v[0-9]+', source_to_sign) \
            and not version:
        version = "1"
    if version: version = "v" + str(version)

    transformation = re.sub(r'([^:])/+', r'\1/', transformation)

    signature = None
    if sign_url and not auth_token:
        # Simple signed URL: 8 urlsafe-base64 chars of SHA1 over path+secret,
        # wrapped as "s--...--".
        to_sign = "/".join(__compact([transformation, source_to_sign]))
        signature = "s--" + to_string(
            base64.urlsafe_b64encode(hashlib.sha1(to_bytes(to_sign + api_secret)).digest())[0:8]) + "--"

    prefix = unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname,
                                          secure, secure_distribution)
    source = "/".join(__compact([prefix, resource_type, type, signature, transformation, version, source]))
    if sign_url and auth_token:
        # Token-based authentication is appended as a query string.
        path = urlparse(source).path
        token = cloudinary.auth_token.generate( **merge(auth_token, {"url": path}))
        source = "%s?%s" % (source, token)
    return source, options
|
||||
|
||||
|
||||
def cloudinary_api_url(action='upload', **options):
    """Build the Cloudinary API endpoint URL for the given action.

    :param action: API action segment (e.g. "upload", "download")
    :param options: may override upload_prefix, cloud_name, resource_type
    :raises ValueError: when no cloud_name is configured or supplied
    """
    prefix = options.get("upload_prefix", cloudinary.config().upload_prefix)
    if not prefix:
        prefix = "https://api.cloudinary.com"
    name = options.get("cloud_name", cloudinary.config().cloud_name)
    if not name:
        raise ValueError("Must supply cloud_name")
    resource = options.get("resource_type", "image")
    return "/".join([prefix, "v1_1", name, resource, action])
|
||||
|
||||
|
||||
def smart_escape(source, unsafe=r"([^a-zA-Z0-9_.\-\/:]+)"):
    """Percent-encode runs of unsafe characters in *source*.

    Based on Ruby's CGI::unescape; unlike standard URL quoting it leaves
    '/' and ':' unescaped by default.

    :param source: text (or bytes) to escape
    :param unsafe: regex (one capture group) matching characters to encode
    """
    def _encode_group(match):
        group = match.group(1)
        octets = struct.unpack('B' * len(group), group)
        hex_run = "%".join(["%02X" % octet for octet in octets]).upper()
        return to_bytes('%' + hex_run)

    return to_string(re.sub(to_bytes(unsafe), _encode_group, to_bytes(source)))
|
||||
|
||||
|
||||
def random_public_id():
    """Return a random 16-character public id of lowercase letters and digits,
    drawn from a cryptographically strong source."""
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(16))
|
||||
|
||||
|
||||
def signed_preloaded_image(result):
    """Build the preloaded-image reference string from an upload API result.

    Format: ``<resource_type>/upload/v<version>/<public_id>[.<format>]#<signature>``
    (the extension is omitted when "format" is falsy).
    """
    name_parts = [result["public_id"], result["format"]]
    filename = ".".join(part for part in name_parts if part)
    segments = [result["resource_type"], "upload", "v" + str(result["version"]), filename]
    return "/".join(segments) + "#" + result["signature"]
|
||||
|
||||
|
||||
def now():
    """Return the current Unix timestamp (whole seconds) as a string."""
    epoch_seconds = int(time.time())
    return str(epoch_seconds)
|
||||
|
||||
|
||||
def private_download_url(public_id, format, **options):
    """Return a signed URL for privately downloading an original asset.

    :param public_id: the asset's public id
    :param format: file format to deliver
    :param options: may include type, attachment, expires_at and any
        cloudinary_api_url / signing options
    """
    params = {
        "timestamp": now(),
        "public_id": public_id,
        "format": format,
        "type": options.get("type"),
        "attachment": options.get("attachment"),
        "expires_at": options.get("expires_at"),
    }
    signed_params = sign_request(params, options)
    endpoint = cloudinary_api_url("download", **options)
    return "%s?%s" % (endpoint, urlencode(signed_params))
|
||||
|
||||
|
||||
def zip_download_url(tag, **options):
    """Return a signed URL that downloads a zip of all assets tagged *tag*,
    optionally transformed with the given transformation options."""
    params = {
        "timestamp": now(),
        "tag": tag,
        "transformation": generate_transformation_string(**options)[0],
    }
    signed_params = sign_request(params, options)
    endpoint = cloudinary_api_url("download_tag.zip", **options)
    return endpoint + "?" + urlencode(signed_params)
|
||||
|
||||
|
||||
def bracketize_seq(params):
    """Return a copy of *params* where every list-valued key gets a "[]"
    suffix (Rails/PHP-style array query parameters)."""
    result = {}
    for name, value in params.items():
        key = name + "[]" if isinstance(value, list) else name
        result[key] = value
    return result
|
||||
|
||||
|
||||
def download_archive_url(**options):
    """Return a signed URL that generates and downloads an archive of
    resources selected by *options* (see archive_params for the full list)."""
    archive_options = dict(options, mode="download")
    signed_params = sign_request(archive_params(**archive_options), options)
    query = urlencode(bracketize_seq(signed_params), True)
    return cloudinary_api_url("generate_archive", **options) + "?" + query
|
||||
|
||||
|
||||
def download_zip_url(**options):
    """Convenience wrapper: download_archive_url with the target format
    forced to zip."""
    zip_options = dict(options, target_format="zip")
    return download_archive_url(**zip_options)
|
||||
|
||||
def generate_auth_token(**options):
    """Generate a Cloudinary authentication token; explicit *options*
    override the configured auth_token defaults."""
    merged_options = merge(cloudinary.config().auth_token, options)
    return auth_token.generate(**merged_options)
|
||||
|
||||
def archive_params(**options):
    """Build the request parameters for the generate_archive API endpoint.

    Whitelists the archive-related options, normalizing list-valued ones
    with build_array and serializing transformations with build_eager.
    A timestamp is generated when the caller did not provide one.

    :param options: archive options (mode, target_format, tags, ...)
    :returns: dict of API parameters; unset options map to None
    """
    # Default the timestamp only when absent so an explicit caller-supplied
    # timestamp (e.g. for reproducible signing) is preserved.
    if options.get("timestamp") is None:
        timestamp = now()
    else:
        timestamp = options.get("timestamp")
    params = {
        "allow_missing": options.get("allow_missing"),
        "async": options.get("async"),
        "expires_at": options.get("expires_at"),
        "flatten_folders": options.get("flatten_folders"),
        "flatten_transformations": options.get("flatten_transformations"),
        "keep_derived": options.get("keep_derived"),
        "mode": options.get("mode"),
        "notification_url": options.get("notification_url"),
        "phash": options.get("phash"),
        # `x and build_array(x)` normalizes only truthy values; a falsy
        # value (None, "", []) is passed through unchanged.
        "prefixes": options.get("prefixes") and build_array(options.get("prefixes")),
        "public_ids": options.get("public_ids") and build_array(options.get("public_ids")),
        "skip_transformation_name": options.get("skip_transformation_name"),
        "tags": options.get("tags") and build_array(options.get("tags")),
        "target_format": options.get("target_format"),
        "target_public_id": options.get("target_public_id"),
        "target_tags": options.get("target_tags") and build_array(options.get("target_tags")),
        "timestamp": timestamp,
        "transformations": build_eager(options.get("transformations")),
        "type": options.get("type"),
        "use_original_filename": options.get("use_original_filename"),
    }
    return params
|
||||
|
||||
|
||||
def build_eager(transformations):
    """Serialize eager transformations into the pipe-delimited API format.

    Each entry may be a ready-made transformation string or a dict of
    transformation options (optionally with a "format" extension).
    Returns None when *transformations* is None.
    """
    if transformations is None:
        return None
    serialized = []
    for transformation in build_array(transformations):
        if isinstance(transformation, string_types):
            serialized.append(transformation)
        else:
            extension = transformation.get("format")
            trans_str = generate_transformation_string(**transformation)[0]
            serialized.append("/".join(x for x in [trans_str, extension] if x))
    return "|".join(serialized)
|
||||
|
||||
|
||||
def build_custom_headers(headers):
    """Normalize custom HTTP headers to a newline-separated string.

    Accepts a list of "Name: value" strings or a dict; None stays None,
    and any other value (e.g. an already-joined string) is returned
    unchanged.
    """
    if headers is None:
        return None
    if isinstance(headers, list):
        return "\n".join(headers)
    if isinstance(headers, dict):
        return "\n".join(name + ": " + value for name, value in headers.items())
    return headers
|
||||
|
||||
|
||||
def build_upload_params(**options):
    """Build the parameter dict for the upload API call.

    Whitelists the supported upload options, serializing structured values
    (eager transformations, custom headers, coordinate arrays, context,
    responsive breakpoints, access control) into their API wire formats.
    A signing timestamp is always included.

    :param options: upload options as accepted by uploader.upload
    :returns: dict of API parameters; unset options map to None
    """
    params = {"timestamp": now(),
              # First element of the tuple is the serialized transformation string.
              "transformation": generate_transformation_string(**options)[0],
              "public_id": options.get("public_id"),
              "callback": options.get("callback"),
              "format": options.get("format"),
              "type": options.get("type"),
              "backup": options.get("backup"),
              "faces": options.get("faces"),
              "image_metadata": options.get("image_metadata"),
              "exif": options.get("exif"),
              "colors": options.get("colors"),
              "headers": build_custom_headers(options.get("headers")),
              "eager": build_eager(options.get("eager")),
              "use_filename": options.get("use_filename"),
              "unique_filename": options.get("unique_filename"),
              "discard_original_filename": options.get("discard_original_filename"),
              "invalidate": options.get("invalidate"),
              "notification_url": options.get("notification_url"),
              "eager_notification_url": options.get("eager_notification_url"),
              "eager_async": options.get("eager_async"),
              "proxy": options.get("proxy"),
              "folder": options.get("folder"),
              "overwrite": options.get("overwrite"),
              # List-valued options become comma-separated strings only when truthy.
              "tags": options.get("tags") and ",".join(build_array(options["tags"])),
              "allowed_formats": options.get("allowed_formats") and ",".join(build_array(options["allowed_formats"])),
              "face_coordinates": encode_double_array(options.get("face_coordinates")),
              "custom_coordinates": encode_double_array(options.get("custom_coordinates")),
              "context": encode_context(options.get("context")),
              "moderation": options.get("moderation"),
              "raw_convert": options.get("raw_convert"),
              "quality_override": options.get("quality_override"),
              "ocr": options.get("ocr"),
              "categorization": options.get("categorization"),
              "detection": options.get("detection"),
              "similarity_search": options.get("similarity_search"),
              "background_removal": options.get("background_removal"),
              "upload_preset": options.get("upload_preset"),
              "phash": options.get("phash"),
              "return_delete_token": options.get("return_delete_token"),
              "auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")),
              "responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")),
              "async": options.get("async"),
              "access_control": options.get("access_control") and json_encode(build_list_of_dicts(options.get("access_control")))}
    return params
|
||||
|
||||
|
||||
def __process_text_options(layer, layer_parameter):
    """Serialize the text-styling options of a text layer into the
    underscore-joined keyword string used in the URL.

    Returns None when no styling options are present.

    :raises ValueError: when styling keywords are given without both
        font_family and font_size
    """
    keywords = []
    for attr, default_value in __LAYER_KEYWORD_PARAMS:
        value = layer.get(attr)
        if value is not None and value != default_value:
            keywords.append(value)

    for spacing_attr in ("letter_spacing", "line_spacing"):
        spacing = layer.get(spacing_attr)
        if spacing is not None:
            keywords.append(spacing_attr + "_" + str(spacing))

    font_family = layer.get("font_family")
    font_size = layer.get("font_size")

    # No styling at all -> no text-options component in the URL.
    if font_size is None and font_family is None and not keywords:
        return None

    if font_family is None:
        raise ValueError("Must supply font_family for text in " + layer_parameter)
    if font_size is None:
        raise ValueError("Must supply font_size for text in " + layer_parameter)

    # Family and size always lead the keyword list, in that order.
    keywords.insert(0, font_size)
    keywords.insert(0, font_family)

    return '_'.join(str(keyword) for keyword in keywords)
|
||||
|
||||
|
||||
def process_layer(layer, layer_parameter):
    """Serialize an overlay/underlay *layer* into its URL component.

    Accepts a ready-made string (returned as-is, unless it is a
    "fetch:<url>" shortcut) or a dict describing the layer (resource_type,
    type, public_id, format, text, url).

    :param layer: the layer definition
    :param layer_parameter: name of the calling parameter, used in errors
    :raises ValueError: when a required field for the layer kind is missing

    Fixes over the previous revision: removed the unused
    ``re.findall`` result and dead commented-out code, and corrected the
    duplicated word in the "non-text" error message.
    """
    if isinstance(layer, string_types) and layer.startswith("fetch:"):
        layer = {"url": layer[len('fetch:'):]}
    if not isinstance(layer, dict):
        return layer

    resource_type = layer.get("resource_type")
    text = layer.get("text")
    type = layer.get("type")
    public_id = layer.get("public_id")
    format = layer.get("format")
    fetch = layer.get("url")
    components = list()

    # Infer the resource type from the presence of text / fetch url.
    if text is not None and resource_type is None:
        resource_type = "text"

    if fetch and resource_type is None:
        resource_type = "fetch"

    if public_id is not None and format is not None:
        public_id = public_id + "." + format

    if public_id is None and resource_type != "text" and resource_type != "fetch":
        raise ValueError("Must supply public_id for non-text " + layer_parameter)

    # "image"/"upload" are the defaults and are omitted from the URL.
    if resource_type is not None and resource_type != "image":
        components.append(resource_type)

    if type is not None and type != "upload":
        components.append(type)

    if resource_type == "text" or resource_type == "subtitles":
        if public_id is None and text is None:
            raise ValueError("Must supply either text or public_id in " + layer_parameter)

        text_options = __process_text_options(layer, layer_parameter)

        if text_options is not None:
            components.append(text_options)

        if public_id is not None:
            public_id = public_id.replace("/", ':')
            components.append(public_id)

        if text is not None:
            # Keep $-variable references intact; double-escape everything else
            # (commas and slashes are significant in the URL syntax).
            var_pattern = VAR_NAME_RE
            parts = filter(lambda p: p is not None, re.split(var_pattern, text))
            encoded_text = []
            for part in parts:
                if re.match(var_pattern, part):
                    encoded_text.append(part)
                else:
                    encoded_text.append(smart_escape(smart_escape(part, r"([,/])")))

            text = ''.join(encoded_text)
            components.append(text)
    elif resource_type == "fetch":
        b64 = base64_encode_url(fetch)
        components.append(b64)
    else:
        public_id = public_id.replace("/", ':')
        components.append(public_id)

    return ':'.join(components)
|
||||
|
||||
# Mapping of comparison/boolean/arithmetic operators to the aliases used
# inside "if" transformation expressions (e.g. "w > 100" -> "w_gt_100").
IF_OPERATORS = {
    "=": 'eq',
    "!=": 'ne',
    "<": 'lt',
    ">": 'gt',
    "<=": 'lte',
    ">=": 'gte',
    "&&": 'and',
    "||": 'or',
    "*": 'mul',
    "/": 'div',
    "+": 'add',
    "-": 'sub'
}

# Mapping of human-readable expression variable names to the short names
# used in transformation URLs.
PREDEFINED_VARS = {
    "aspect_ratio": "ar",
    "current_page": "cp",
    "face_count": "fc",
    "height": "h",
    "initial_aspect_ratio": "iar",
    "initial_height": "ih",
    "initial_width": "iw",
    "page_count": "pc",
    "page_x": "px",
    "page_y": "py",
    "tags": "tags",
    "width": "w"
}

# Pattern used by normalize_expression: matches an operator (only when
# followed by a space or underscore) or any predefined variable name.
replaceRE = "((\\|\\||>=|<=|&&|!=|>|=|<|/|-|\\+|\\*)(?=[ _])|" + '|'.join(PREDEFINED_VARS.keys()) + ")"
|
||||
|
||||
|
||||
def translate_if(match):
    """re.sub callback: translate a matched operator or predefined variable
    name into its URL alias; unknown tokens pass through unchanged."""
    token = match.group(0)
    if token in IF_OPERATORS:
        return IF_OPERATORS[token]
    return PREDEFINED_VARS.get(token, token)
|
||||
|
||||
def process_conditional(conditional):
    """Normalize an "if" condition expression for URL use; None passes
    through unchanged."""
    if conditional is None:
        return conditional
    return normalize_expression(conditional)
|
||||
|
||||
def normalize_expression(expression):
    """Translate an arbitrary expression into Cloudinary URL syntax.

    Strings quoted as ``!...!`` and falsy values are returned untouched;
    otherwise operators and predefined variables are translated and runs
    of spaces/underscores collapse to a single underscore.
    """
    if re.match(r'^!.+!$', str(expression)):  # quoted string literal
        return expression
    if not expression:
        return expression
    translated = re.sub(replaceRE, translate_if, str(expression))
    return re.sub('[ _]+', '_', translated)
|
||||
|
||||
def __join_pair(key, value):
    """Render a single HTML attribute: None/"" -> None (omitted),
    True -> bare attribute name, anything else -> key="value"."""
    if value is None or value == "":
        return None
    if value is True:
        return key
    return u"{0}=\"{1}\"".format(key, value)
|
||||
|
||||
|
||||
def html_attrs(attrs, only=None):
    """Serialize an attribute dict into a sorted, space-separated HTML
    attribute string; when *only* is given, restrict to those keys."""
    rendered = [__join_pair(key, value)
                for key, value in attrs.items()
                if only is None or key in only]
    return ' '.join(sorted(rendered))
|
||||
|
||||
|
||||
def __safe_value(v):
    """Booleans are serialized as "1"/"0" for request parameters; every
    other value is passed through unchanged."""
    if not isinstance(v, bool):
        return v
    return "1" if v else "0"
|
||||
|
||||
|
||||
def __crc(source):
    """Deterministically map *source* to a shard index "1".."5" via CRC32
    (used to pick a CDN subdomain)."""
    checksum = zlib.crc32(to_bytearray(source)) & 0xffffffff
    return str(checksum % 5 + 1)
|
||||
|
||||
|
||||
def __compact(array):
    """Lazily drop falsy entries from *array* (returns a filter object)."""
    return filter(None, array)
|
||||
|
||||
|
||||
def base64_encode_url(url):
    """Return the Base64-encoded version of *url*.

    The url is first unquoted (best effort) and then re-escaped, so that
    a pre-quoted and an unquoted input produce the same encoding.

    :param str url: the url to encode; URI-decoded and then re-encoded
        before converting to its base64 representation
    :returns: the base64 representation as an ascii str
    """
    try:
        url = unquote(url)
    except Exception:
        # Best effort: fall back to the raw value. Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit are not swallowed.
        pass
    url = smart_escape(url)
    b64 = base64.b64encode(url.encode('utf-8'))
    return b64.decode('ascii')
|
||||
|
||||
|
||||
def __json_serializer(obj):
    """json.dumps `default` hook: dates/datetimes become ISO-8601 strings;
    anything else raises TypeError."""
    if not isinstance(obj, (datetime, date)):
        raise TypeError("Object of type %s is not JSON serializable" % type(obj))
    return obj.isoformat()
|
Loading…
Add table
Add a link
Reference in a new issue