mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-08-14 02:26:58 -07:00
Initial Commit
This commit is contained in:
commit
88daa3fb91
1311 changed files with 256240 additions and 0 deletions
0
lib/mako/ext/__init__.py
Normal file
0
lib/mako/ext/__init__.py
Normal file
65
lib/mako/ext/autohandler.py
Normal file
65
lib/mako/ext/autohandler.py
Normal file
|
@ -0,0 +1,65 @@
|
|||
# ext/autohandler.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""adds autohandler functionality to Mako templates.
|
||||
|
||||
requires that the TemplateLookup class is used with templates.
|
||||
|
||||
usage:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context)}"/>
|
||||
|
||||
|
||||
or with custom autohandler filename:
|
||||
|
||||
<%!
|
||||
from mako.ext.autohandler import autohandler
|
||||
%>
|
||||
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
|
||||
|
||||
"""
|
||||
|
||||
import posixpath, os, re
|
||||
|
||||
def autohandler(template, context, name='autohandler'):
|
||||
lookup = context.lookup
|
||||
_template_uri = template.module._template_uri
|
||||
if not lookup.filesystem_checks:
|
||||
try:
|
||||
return lookup._uri_cache[(autohandler, _template_uri, name)]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
|
||||
while len(tokens):
|
||||
path = '/' + '/'.join(tokens)
|
||||
if path != _template_uri and _file_exists(lookup, path):
|
||||
if not lookup.filesystem_checks:
|
||||
return lookup._uri_cache.setdefault(
|
||||
(autohandler, _template_uri, name), path)
|
||||
else:
|
||||
return path
|
||||
if len(tokens) == 1:
|
||||
break
|
||||
tokens[-2:] = [name]
|
||||
|
||||
if not lookup.filesystem_checks:
|
||||
return lookup._uri_cache.setdefault(
|
||||
(autohandler, _template_uri, name), None)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _file_exists(lookup, path):
|
||||
psub = re.sub(r'^/', '',path)
|
||||
for d in lookup.directories:
|
||||
if os.path.exists(d + '/' + psub):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
49
lib/mako/ext/babelplugin.py
Normal file
49
lib/mako/ext/babelplugin.py
Normal file
|
@ -0,0 +1,49 @@
|
|||
# ext/babelplugin.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
|
||||
from babel.messages.extract import extract_python
|
||||
from mako.ext.extract import MessageExtractor
|
||||
|
||||
|
||||
class BabelMakoExtractor(MessageExtractor):
    """Adapts :class:`.MessageExtractor` to Babel's extraction interface."""

    def __init__(self, keywords, comment_tags, options):
        self.keywords = keywords
        self.options = options
        # Babel hands the input encoding under either of two option names.
        encoding = options.get('input_encoding',
                               options.get('encoding', None))
        self.config = {
            'comment-tags': u' '.join(comment_tags),
            'encoding': encoding,
        }
        super(BabelMakoExtractor, self).__init__()

    def __call__(self, fileobj):
        # Babel invokes the extractor as a callable on an open file object.
        return self.process_file(fileobj)

    def process_python(self, code, code_lineno, translator_strings):
        # Feed each embedded Python fragment to Babel's own extractor and
        # rebase its line numbers onto the template's coordinates.
        tags = self.config['comment-tags']
        for lineno, funcname, messages, python_translator_comments in \
                extract_python(code, self.keywords, tags, self.options):
            adjusted_lineno = code_lineno + (lineno - 1)
            yield (adjusted_lineno, funcname, messages,
                   translator_strings + python_translator_comments)
|
||||
|
||||
|
||||
def extract(fileobj, keywords, comment_tags, options):
    """Extract messages from Mako templates.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples
    :rtype: ``iterator``
    """
    babel_extractor = BabelMakoExtractor(keywords, comment_tags, options)
    for extracted in babel_extractor(fileobj):
        yield extracted
|
75
lib/mako/ext/beaker_cache.py
Normal file
75
lib/mako/ext/beaker_cache.py
Normal file
|
@ -0,0 +1,75 @@
|
|||
"""Provide a :class:`.CacheImpl` for the Beaker caching system."""
|
||||
|
||||
from mako import exceptions
|
||||
|
||||
from mako.cache import CacheImpl
|
||||
|
||||
try:
|
||||
from beaker import cache as beaker_cache
|
||||
except:
|
||||
has_beaker = False
|
||||
else:
|
||||
has_beaker = True
|
||||
|
||||
_beaker_cache = None
|
||||
|
||||
|
||||
class BeakerCacheImpl(CacheImpl):
    """A :class:`.CacheImpl` provided for the Beaker caching system.

    This plugin is used by default, based on the default
    value of ``'beaker'`` for the ``cache_impl`` parameter of the
    :class:`.Template` or :class:`.TemplateLookup` classes.

    """

    def __init__(self, cache):
        # Fail loudly if the module-level import of Beaker did not succeed.
        if not has_beaker:
            raise exceptions.RuntimeException(
                "Can't initialize Beaker plugin; Beaker is not installed.")
        global _beaker_cache
        # Establish the module-wide CacheManager on first use.  A template
        # may supply its own manager via the 'manager' cache argument;
        # otherwise a default CacheManager is created and shared.
        if _beaker_cache is None:
            if 'manager' in cache.template.cache_args:
                _beaker_cache = cache.template.cache_args['manager']
            else:
                _beaker_cache = beaker_cache.CacheManager()
        super(BeakerCacheImpl, self).__init__(cache)

    def _get_cache(self, **kw):
        # Translate Mako-level cache arguments into Beaker's vocabulary and
        # return a ``(beaker_cache, remaining_args)`` pair for the callers.
        expiretime = kw.pop('timeout', None)
        if 'dir' in kw:
            # Mako's 'dir' argument maps to Beaker's 'data_dir'
            kw['data_dir'] = kw.pop('dir')
        elif self.cache.template.module_directory:
            # fall back to the template's module directory for on-disk data
            kw['data_dir'] = self.cache.template.module_directory

        if 'manager' in kw:
            # already consumed at __init__ time; don't pass through to Beaker
            kw.pop('manager')

        if kw.get('type') == 'memcached':
            # Beaker spells the memcached backend 'ext:memcached'
            kw['type'] = 'ext:memcached'

        if 'region' in kw:
            region = kw.pop('region')
            cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
        else:
            cache = _beaker_cache.get_cache(self.cache.id, **kw)
        cache_args = {'starttime': self.cache.starttime}
        if expiretime:
            cache_args['expiretime'] = expiretime
        return cache, cache_args

    def get_or_create(self, key, creation_function, **kw):
        # Return the cached value for ``key``, invoking ``creation_function``
        # to produce and store it on a miss.
        cache, kw = self._get_cache(**kw)
        return cache.get(key, createfunc=creation_function, **kw)

    def put(self, key, value, **kw):
        # Store ``value`` under ``key``.
        cache, kw = self._get_cache(**kw)
        cache.put(key, value, **kw)

    def get(self, key, **kw):
        # Retrieve the value stored under ``key``.
        cache, kw = self._get_cache(**kw)
        return cache.get(key, **kw)

    def invalidate(self, key, **kw):
        # Remove ``key`` from the cache.
        cache, kw = self._get_cache(**kw)
        cache.remove_value(key, **kw)
|
101
lib/mako/ext/extract.py
Normal file
101
lib/mako/ext/extract.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
import re
|
||||
from mako import compat
|
||||
from mako import lexer
|
||||
from mako import parsetree
|
||||
|
||||
|
||||
class MessageExtractor(object):
    """Base class for extracting translatable strings from Mako templates.

    Subclasses (the Babel and lingua plugins) are expected to provide
    ``self.config`` with ``'encoding'`` and ``'comment-tags'`` entries and
    to implement ``process_python(code, code_lineno, translator_strings)``,
    which extracts messages from a byte buffer of Python source.
    """

    def process_file(self, fileobj):
        # Parse the entire template and walk its top-level parse-tree nodes.
        template_node = lexer.Lexer(
            fileobj.read(),
            input_encoding=self.config['encoding']).parse()
        for extracted in self.extract_nodes(template_node.get_children()):
            yield extracted

    def extract_nodes(self, nodes):
        # Walk template parse-tree nodes, tracking runs of "translator
        # comments" (template comments starting with a configured tag) so
        # they can be attached to the messages extracted from the Python
        # code that immediately follows them.
        translator_comments = []
        in_translator_comments = False
        comment_tags = list(
            filter(None, re.split(r'\s+', self.config['comment-tags'])))

        for node in nodes:
            child_nodes = None
            if in_translator_comments and \
                    isinstance(node, parsetree.Text) and \
                    not node.content.strip():
                # Ignore whitespace within translator comments
                continue

            if isinstance(node, parsetree.Comment):
                value = node.text.strip()
                if in_translator_comments:
                    # continuation of an open translator-comment run
                    translator_comments.extend(
                        self._split_comment(node.lineno, value))
                    continue
                for comment_tag in comment_tags:
                    if value.startswith(comment_tag):
                        # a configured tag opens a translator-comment run
                        in_translator_comments = True
                        translator_comments.extend(
                            self._split_comment(node.lineno, value))
                continue

            # Pick out the embedded Python code carried by each node type;
            # tags that contain a body also set child_nodes for recursion.
            if isinstance(node, parsetree.DefTag):
                code = node.function_decl.code
                child_nodes = node.nodes
            elif isinstance(node, parsetree.BlockTag):
                code = node.body_decl.code
                child_nodes = node.nodes
            elif isinstance(node, parsetree.CallTag):
                code = node.code.code
                child_nodes = node.nodes
            elif isinstance(node, parsetree.PageTag):
                code = node.body_decl.code
            elif isinstance(node, parsetree.CallNamespaceTag):
                code = node.expression
                child_nodes = node.nodes
            elif isinstance(node, parsetree.ControlLine):
                if node.isend:
                    # a closing control line ends any open comment run
                    in_translator_comments = False
                    continue
                code = node.text
            elif isinstance(node, parsetree.Code):
                in_translator_comments = False
                code = node.code.code
            elif isinstance(node, parsetree.Expression):
                code = node.code.code
            else:
                # node type carries no extractable Python code
                continue

            # Comments don't apply unless they immediately precede the message
            if translator_comments and \
                    translator_comments[-1][0] < node.lineno - 1:
                translator_comments = []

            translator_strings = [
                comment[1] for comment in translator_comments]

            # process_python implementations expect a byte buffer
            if isinstance(code, compat.text_type):
                code = code.encode('ascii', 'backslashreplace')

            used_translator_comments = False
            code = compat.byte_buffer(code)

            for message in self.process_python(
                    code, node.lineno, translator_strings):
                yield message
                used_translator_comments = True

            # comments are consumed by the first node that yields messages
            if used_translator_comments:
                translator_comments = []
            in_translator_comments = False

            if child_nodes:
                for extracted in self.extract_nodes(child_nodes):
                    yield extracted

    @staticmethod
    def _split_comment(lineno, comment):
        """Return the multiline comment at lineno split into a list of
        comment line numbers and the accompanying comment line"""
        return [(lineno + index, line) for index, line in
                enumerate(comment.splitlines())]
|
38
lib/mako/ext/linguaplugin.py
Normal file
38
lib/mako/ext/linguaplugin.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
import io
|
||||
from lingua.extractors import Extractor
|
||||
from lingua.extractors import Message
|
||||
from lingua.extractors import get_extractor
|
||||
from mako.ext.extract import MessageExtractor
|
||||
from mako import compat
|
||||
|
||||
|
||||
class LinguaMakoExtractor(Extractor, MessageExtractor):
    '''Mako templates'''
    # file extensions lingua associates with this extractor
    extensions = ['.mako']
    default_config = {
        'encoding': 'utf-8',
        'comment-tags': '',
    }

    def __call__(self, filename, options, fileobj=None):
        """Extract messages from *filename* (or an already-open *fileobj*).

        Yields lingua ``Message`` objects.  When this extractor opens the
        file itself, it now also closes it once extraction is complete —
        the original version leaked the file handle.
        """
        self.options = options
        self.filename = filename
        self.python_extractor = get_extractor('x.py')
        if fileobj is None:
            fileobj = open(filename, 'rb')
            must_close = True
        else:
            # the caller owns the file object; leave it open
            must_close = False
        try:
            for message in self.process_file(fileobj):
                yield message
        finally:
            if must_close:
                fileobj.close()

    def process_python(self, code, code_lineno, translator_strings):
        # Delegate message extraction to lingua's own Python extractor,
        # patching code fragments so they parse standalone.
        source = code.getvalue().strip()
        if source.endswith(compat.b(':')):
            # a bare control line like 'if x:' is not valid Python on its
            # own; append 'pass' so the parser accepts it
            source += compat.b(' pass')
        code = io.BytesIO(source)
        for msg in self.python_extractor(
                self.filename, self.options, code, code_lineno):
            if translator_strings:
                # prepend Mako-level translator comments to the message
                msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
                              msg.flags,
                              compat.u(' ').join(
                                  translator_strings + [msg.comment]),
                              msg.tcomment, msg.location)
            yield msg
|
20
lib/mako/ext/preprocessors.py
Normal file
20
lib/mako/ext/preprocessors.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
# ext/preprocessors.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""preprocessing functions, used with the 'preprocessor'
|
||||
argument on Template, TemplateLookup"""
|
||||
|
||||
import re
|
||||
|
||||
def convert_comments(text):
    """preprocess old style comments.

    Rewrites lines whose first non-whitespace character is a single ``#``
    (the pre-0.5 Mako comment syntax) to use the modern ``##`` marker.

    example:

        from mako.ext.preprocessors import convert_comments
        t = Template(..., preprocessor=convert_comments)

    Lines already using ``##`` are left untouched.
    """
    # The original ``re.sub(r'(?<=\n)\s*#[^#]', "##", text)`` replaced the
    # whole match with '##', silently deleting the leading whitespace and
    # the character matched by [^#] — corrupting the first character of
    # every converted comment.  Re-emit both captured pieces around the
    # new '##' marker instead.
    return re.sub(
        r'(?<=\n)(\s*)#([^#])',
        lambda m: m.group(1) + "##" + m.group(2),
        text)
|
||||
|
122
lib/mako/ext/pygmentplugin.py
Normal file
122
lib/mako/ext/pygmentplugin.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
# ext/pygmentplugin.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from pygments.lexers.web import \
|
||||
HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
|
||||
from pygments.lexers.agile import PythonLexer, Python3Lexer
|
||||
from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
|
||||
include, using
|
||||
from pygments.token import \
|
||||
Text, Comment, Operator, Keyword, Name, String, Other
|
||||
from pygments.formatters.html import HtmlFormatter
|
||||
from pygments import highlight
|
||||
from mako import compat
|
||||
|
||||
class MakoLexer(RegexLexer):
    # Pygments lexer for standalone Mako template source; delegates the
    # embedded Python fragments to PythonLexer via using().
    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']

    tokens = {
        'root': [
            # '% endfor' / '% endif' style closing control lines
            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # '% ...' control lines containing Python ('%%' is an escape)
            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # '##' single-line comments
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            # <%doc> ... </%doc> multiline comments
            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
            # opening template tag, e.g. <%def ...> — switches to 'tag'
            (r'(<%)([\w\.\:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            # closing template tag, e.g. </%def>
            (r'(</%)([\w\.\:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> / <%! ... %> Python blocks
            (r'(<%(?:!?))(.*?)(%>)(?s)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${ ... } expression substitutions
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # plain template text, consumed up to the next Mako construct
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            # highlight the well-known built-in tag names
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            # attribute="value" pairs inside a template tag
            (r'((?:\w+)\s*=)\s*(".*?")',
             bygroups(Name.Attribute, String)),
            # tag close ('>' or '/>') pops back to the previous state
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
|
||||
|
||||
|
||||
class MakoHtmlLexer(DelegatingLexer):
    """Highlights Mako constructs embedded in HTML documents."""

    name = 'HTML+Mako'
    aliases = ['html+mako']

    def __init__(self, **options):
        DelegatingLexer.__init__(
            self, HtmlLexer, MakoLexer, **options)
|
||||
|
||||
class MakoXmlLexer(DelegatingLexer):
    """Highlights Mako constructs embedded in XML documents."""

    name = 'XML+Mako'
    aliases = ['xml+mako']

    def __init__(self, **options):
        DelegatingLexer.__init__(
            self, XmlLexer, MakoLexer, **options)
|
||||
|
||||
class MakoJavascriptLexer(DelegatingLexer):
    """Highlights Mako constructs embedded in JavaScript source."""

    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']

    def __init__(self, **options):
        DelegatingLexer.__init__(
            self, JavascriptLexer, MakoLexer, **options)
|
||||
|
||||
class MakoCssLexer(DelegatingLexer):
    """Highlights Mako constructs embedded in CSS source."""

    name = 'CSS+Mako'
    aliases = ['css+mako']

    def __init__(self, **options):
        DelegatingLexer.__init__(
            self, CssLexer, MakoLexer, **options)
|
||||
|
||||
|
||||
# Shared formatter: emits HTML wrapped in a 'syntax-highlighted' div,
# with line numbers enabled.
pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
                                        linenos=True)


def syntax_highlight(filename='', language=None):
    """Return a callable that renders a source string to highlighted HTML.

    Mako sources (in-memory templates, or an explicit ``language='mako'``)
    are highlighted with :class:`MakoLexer`; anything else is treated as
    Python module source, using the Python-3 lexer on py3k.
    """
    if filename.startswith('memory:') or language == 'mako':
        lexer = MakoLexer()
    else:
        lexer = Python3Lexer() if compat.py3k else PythonLexer()
    return lambda string: highlight(string, lexer, pygments_html_formatter)
|
||||
|
58
lib/mako/ext/turbogears.py
Normal file
58
lib/mako/ext/turbogears.py
Normal file
|
@ -0,0 +1,58 @@
|
|||
# ext/turbogears.py
|
||||
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
|
||||
#
|
||||
# This module is part of Mako and is released under
|
||||
# the MIT License: http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import inspect
|
||||
from mako import compat
|
||||
from mako.lookup import TemplateLookup
|
||||
from mako.template import Template
|
||||
|
||||
class TGPlugin(object):
    """TurboGears compatible Template Plugin."""

    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
        """Build the template lookup from a TurboGears options dict.

        :param extra_vars_func: optional callable returning a dict of
         extra variables merged into every render.
        :param options: configuration dict; keys prefixed ``mako.`` (and a
         few well-known bare keys) are forwarded to
         :class:`.TemplateLookup`.
        :param extension: default template filename extension.
        """
        self.extra_vars_func = extra_vars_func
        self.extension = extension
        if not options:
            options = {}

        # Pull the options out and initialize the lookup
        lookup_options = {}
        for k, v in options.items():
            if k.startswith('mako.'):
                lookup_options[k[5:]] = v
            elif k in ['directories', 'filesystem_checks', 'module_directory']:
                lookup_options[k] = v
        self.lookup = TemplateLookup(**lookup_options)

        self.tmpl_options = {}
        # transfer lookup args to template args, based on those available
        # in the Template constructor's signature.  inspect.getargspec()
        # was removed in Python 3.11, so prefer getfullargspec() and fall
        # back only on Python 2, where getfullargspec does not exist.
        try:
            template_arg_names = inspect.getfullargspec(Template.__init__)[0]
        except AttributeError:
            template_arg_names = inspect.getargspec(Template.__init__)[0]
        for kw in template_arg_names:
            if kw in lookup_options:
                self.tmpl_options[kw] = lookup_options[kw]

    def load_template(self, templatename, template_string=None):
        """Loads a template from a file or a string"""
        if template_string is not None:
            return Template(template_string, **self.tmpl_options)
        # Translate TG dot notation to normal / template path
        if '/' not in templatename:
            templatename = '/' + templatename.replace('.', '/') + '.' +\
                self.extension

        # Lookup template
        return self.lookup.get_template(templatename)

    def render(self, info, format="html", fragment=False, template=None):
        """Render ``template`` with the variables in ``info``.

        ``format`` and ``fragment`` are accepted for TurboGears interface
        compatibility but are not used by Mako.
        """
        if isinstance(template, compat.string_types):
            template = self.load_template(template)

        # Load extra vars func if provided
        if self.extra_vars_func:
            info.update(self.extra_vars_func())

        return template.render(**info)
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue