Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-08-20 21:33:18 -07:00
Update mako to 1.1.0

parent 84ce4758d1
commit f2d7beec90

27 changed files with 2424 additions and 1890 deletions
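As a quick sanity check after the upgrade, the bundled package reports its own version; a minimal sketch (it assumes the vendored mako is importable as "mako", which this page does not state):

    import mako

    # Expected to print 1.1.0 once this commit is applied.
    print(mako.__version__)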
ext/autohandler.py

@@ -1,5 +1,5 @@
 # ext/autohandler.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,26 +8,29 @@
 
 requires that the TemplateLookup class is used with templates.
 
-usage:
+usage::
 
-<%!
-    from mako.ext.autohandler import autohandler
-%>
-<%inherit file="${autohandler(template, context)}"/>
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context)}"/>
 
 
-or with custom autohandler filename:
+or with custom autohandler filename::
 
-<%!
-    from mako.ext.autohandler import autohandler
-%>
-<%inherit file="${autohandler(template, context, name='somefilename')}"/>
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context, name='somefilename')}"/>
 
 """
 
-import posixpath, os, re
+import os
+import posixpath
+import re
 
-def autohandler(template, context, name='autohandler'):
+
+def autohandler(template, context, name="autohandler"):
     lookup = context.lookup
     _template_uri = template.module._template_uri
     if not lookup.filesystem_checks:
@@ -36,30 +39,32 @@ def autohandler(template, context, name='autohandler'):
         except KeyError:
             pass
 
-    tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
+    tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
     while len(tokens):
-        path = '/' + '/'.join(tokens)
+        path = "/" + "/".join(tokens)
         if path != _template_uri and _file_exists(lookup, path):
             if not lookup.filesystem_checks:
                 return lookup._uri_cache.setdefault(
-                    (autohandler, _template_uri, name), path)
+                    (autohandler, _template_uri, name), path
+                )
             else:
                 return path
         if len(tokens) == 1:
             break
         tokens[-2:] = [name]
 
     if not lookup.filesystem_checks:
         return lookup._uri_cache.setdefault(
-            (autohandler, _template_uri, name), None)
+            (autohandler, _template_uri, name), None
+        )
     else:
         return None
 
 
 def _file_exists(lookup, path):
-    psub = re.sub(r'^/', '',path)
+    psub = re.sub(r"^/", "", path)
     for d in lookup.directories:
-        if os.path.exists(d + '/' + psub):
+        if os.path.exists(d + "/" + psub):
             return True
     else:
         return False
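Note on the function shown above: autohandler() walks the template URI upward one directory at a time until an autohandler file is found. A standalone sketch of that walk, using an illustrative URI that is not part of the diff:

    import posixpath
    import re


    def candidate_autohandlers(template_uri, name="autohandler"):
        # Mirrors the token walk in autohandler() above, yielding each
        # candidate path from the deepest directory up to the root.
        tokens = re.findall(r"([^/]+)", posixpath.dirname(template_uri)) + [name]
        while len(tokens):
            yield "/" + "/".join(tokens)
            if len(tokens) == 1:
                break
            tokens[-2:] = [name]


    print(list(candidate_autohandlers("/store/account/index.html")))
    # ['/store/account/autohandler', '/store/autohandler', '/autohandler']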
ext/babelplugin.py

@@ -1,11 +1,12 @@
 # ext/babelplugin.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 """gettext message extraction via Babel: http://babel.edgewall.org/"""
 from babel.messages.extract import extract_python
+
 from mako.ext.extract import MessageExtractor
 
 
@@ -14,22 +15,30 @@ class BabelMakoExtractor(MessageExtractor):
         self.keywords = keywords
         self.options = options
         self.config = {
-            'comment-tags': u' '.join(comment_tags),
-            'encoding': options.get('input_encoding',
-                                    options.get('encoding', None)),
-        }
+            "comment-tags": u" ".join(comment_tags),
+            "encoding": options.get(
+                "input_encoding", options.get("encoding", None)
+            ),
+        }
         super(BabelMakoExtractor, self).__init__()
 
     def __call__(self, fileobj):
         return self.process_file(fileobj)
 
     def process_python(self, code, code_lineno, translator_strings):
-        comment_tags = self.config['comment-tags']
-        for lineno, funcname, messages, python_translator_comments \
-                in extract_python(code,
-                                  self.keywords, comment_tags, self.options):
-            yield (code_lineno + (lineno - 1), funcname, messages,
-                   translator_strings + python_translator_comments)
+        comment_tags = self.config["comment-tags"]
+        for (
+            lineno,
+            funcname,
+            messages,
+            python_translator_comments,
+        ) in extract_python(code, self.keywords, comment_tags, self.options):
+            yield (
+                code_lineno + (lineno - 1),
+                funcname,
+                messages,
+                translator_strings + python_translator_comments,
+            )
 
 
 def extract(fileobj, keywords, comment_tags, options):
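The module-level extract() above is the entry point Babel invokes for Mako templates. A hedged sketch of calling it directly (the template bytes, keyword list, and comment tag are made-up illustrations, and Babel must be installed):

    import io

    from mako.ext.babelplugin import extract

    # Each yielded tuple is (lineno, funcname, messages, translator comments).
    template = io.BytesIO(b'<p>${_("Hello, world!")}</p>\n')
    options = {"input_encoding": "utf-8"}
    for lineno, funcname, messages, comments in extract(
        template, ["_"], ["TRANSLATORS:"], options
    ):
        print(lineno, funcname, messages, comments)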
ext/beaker_cache.py

@@ -1,7 +1,12 @@
+# ext/beaker_cache.py
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
 """Provide a :class:`.CacheImpl` for the Beaker caching system."""
 
 from mako import exceptions
+
 from mako.cache import CacheImpl
 
 try:
@@ -15,6 +20,7 @@ _beaker_cache = None
 
 
 class BeakerCacheImpl(CacheImpl):
+
     """A :class:`.CacheImpl` provided for the Beaker caching system.
 
     This plugin is used by default, based on the default
@@ -26,36 +32,37 @@ class BeakerCacheImpl(CacheImpl):
     def __init__(self, cache):
         if not has_beaker:
             raise exceptions.RuntimeException(
-                "Can't initialize Beaker plugin; Beaker is not installed.")
+                "Can't initialize Beaker plugin; Beaker is not installed."
+            )
         global _beaker_cache
         if _beaker_cache is None:
-            if 'manager' in cache.template.cache_args:
-                _beaker_cache = cache.template.cache_args['manager']
+            if "manager" in cache.template.cache_args:
+                _beaker_cache = cache.template.cache_args["manager"]
             else:
                 _beaker_cache = beaker_cache.CacheManager()
         super(BeakerCacheImpl, self).__init__(cache)
 
     def _get_cache(self, **kw):
-        expiretime = kw.pop('timeout', None)
-        if 'dir' in kw:
-            kw['data_dir'] = kw.pop('dir')
+        expiretime = kw.pop("timeout", None)
+        if "dir" in kw:
+            kw["data_dir"] = kw.pop("dir")
         elif self.cache.template.module_directory:
-            kw['data_dir'] = self.cache.template.module_directory
+            kw["data_dir"] = self.cache.template.module_directory
 
-        if 'manager' in kw:
-            kw.pop('manager')
+        if "manager" in kw:
+            kw.pop("manager")
 
-        if kw.get('type') == 'memcached':
-            kw['type'] = 'ext:memcached'
+        if kw.get("type") == "memcached":
+            kw["type"] = "ext:memcached"
 
-        if 'region' in kw:
-            region = kw.pop('region')
+        if "region" in kw:
+            region = kw.pop("region")
             cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
         else:
             cache = _beaker_cache.get_cache(self.cache.id, **kw)
-        cache_args = {'starttime': self.cache.starttime}
+        cache_args = {"starttime": self.cache.starttime}
         if expiretime:
-            cache_args['expiretime'] = expiretime
+            cache_args["expiretime"] = expiretime
         return cache, cache_args
 
     def get_or_create(self, key, creation_function, **kw):
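The "manager" key popped in _get_cache() above is how a caller hands an existing Beaker CacheManager to this plugin. A rough sketch under the assumption that Beaker is installed (Mako's default cache_impl is this Beaker plugin); the template text is illustrative:

    from beaker.cache import CacheManager

    from mako.template import Template

    # BeakerCacheImpl.__init__ above looks for cache_args["manager"].
    t = Template(
        '<%page cached="True"/>\nhello',
        cache_args={"manager": CacheManager()},
    )
    print(t.render())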
ext/extract.py

@@ -1,4 +1,11 @@
+# ext/extract.py
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
 import re
+
 from mako import compat
 from mako import lexer
 from mako import parsetree
@@ -7,22 +14,26 @@ from mako import parsetree
 class MessageExtractor(object):
     def process_file(self, fileobj):
         template_node = lexer.Lexer(
-            fileobj.read(),
-            input_encoding=self.config['encoding']).parse()
+            fileobj.read(), input_encoding=self.config["encoding"]
+        ).parse()
         for extracted in self.extract_nodes(template_node.get_children()):
             yield extracted
 
     def extract_nodes(self, nodes):
         translator_comments = []
         in_translator_comments = False
+        input_encoding = self.config["encoding"] or "ascii"
         comment_tags = list(
-            filter(None, re.split(r'\s+', self.config['comment-tags'])))
+            filter(None, re.split(r"\s+", self.config["comment-tags"]))
+        )
 
         for node in nodes:
             child_nodes = None
-            if in_translator_comments and \
-                    isinstance(node, parsetree.Text) and \
-                    not node.content.strip():
+            if (
+                in_translator_comments
+                and isinstance(node, parsetree.Text)
+                and not node.content.strip()
+            ):
                 # Ignore whitespace within translator comments
                 continue
 
@@ -30,13 +41,15 @@ class MessageExtractor(object):
                 value = node.text.strip()
                 if in_translator_comments:
                     translator_comments.extend(
-                        self._split_comment(node.lineno, value))
+                        self._split_comment(node.lineno, value)
+                    )
                     continue
                 for comment_tag in comment_tags:
                     if value.startswith(comment_tag):
                         in_translator_comments = True
                         translator_comments.extend(
-                            self._split_comment(node.lineno, value))
+                            self._split_comment(node.lineno, value)
+                        )
                 continue
 
             if isinstance(node, parsetree.DefTag):
@@ -66,22 +79,31 @@ class MessageExtractor(object):
             else:
                 continue
 
-            # Comments don't apply unless they immediately preceed the message
-            if translator_comments and \
-                    translator_comments[-1][0] < node.lineno - 1:
+            # Comments don't apply unless they immediately precede the message
+            if (
+                translator_comments
+                and translator_comments[-1][0] < node.lineno - 1
+            ):
                 translator_comments = []
 
             translator_strings = [
-                comment[1] for comment in translator_comments]
+                comment[1] for comment in translator_comments
+            ]
 
             if isinstance(code, compat.text_type):
-                code = code.encode('ascii', 'backslashreplace')
+                code = code.encode(input_encoding, "backslashreplace")
 
             used_translator_comments = False
-            code = compat.byte_buffer(code)
+            # We add extra newline to work around a pybabel bug
+            # (see python-babel/babel#274, parse_encoding dies if the first
+            # input string of the input is non-ascii)
+            # Also, because we added it, we have to subtract one from
+            # node.lineno
+            code = compat.byte_buffer(compat.b("\n") + code)
 
             for message in self.process_python(
-                    code, node.lineno, translator_strings):
+                code, node.lineno - 1, translator_strings
+            ):
                 yield message
                 used_translator_comments = True
 
@@ -97,5 +119,7 @@ class MessageExtractor(object):
     def _split_comment(lineno, comment):
         """Return the multiline comment at lineno split into a list of
         comment line numbers and the accompanying comment line"""
-        return [(lineno + index, line) for index, line in
-                enumerate(comment.splitlines())]
+        return [
+            (lineno + index, line)
+            for index, line in enumerate(comment.splitlines())
+        ]
ext/linguaplugin.py

@@ -1,38 +1,57 @@
+# ext/linguaplugin.py
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
 import io
+
 from lingua.extractors import Extractor
+from lingua.extractors import Message
 from lingua.extractors import get_extractor
-from mako.ext.extract import MessageExtractor
-from lingua.extractors import Message
+
 from mako import compat
+from mako.ext.extract import MessageExtractor
 
 
 class LinguaMakoExtractor(Extractor, MessageExtractor):
-    '''Mako templates'''
-    extensions = ['.mako']
-    default_config = {
-        'encoding': 'utf-8',
-        'comment-tags': '',
-    }
+
+    """Mako templates"""
+
+    extensions = [".mako"]
+    default_config = {"encoding": "utf-8", "comment-tags": ""}
 
     def __call__(self, filename, options, fileobj=None):
         self.options = options
         self.filename = filename
-        self.python_extractor = get_extractor('x.py')
+        self.python_extractor = get_extractor("x.py")
         if fileobj is None:
-            fileobj = open(filename, 'rb')
+            fileobj = open(filename, "rb")
         return self.process_file(fileobj)
 
     def process_python(self, code, code_lineno, translator_strings):
         source = code.getvalue().strip()
-        if source.endswith(compat.b(':')):
-            source += compat.b(' pass')
-        code = io.BytesIO(source)
+        if source.endswith(compat.b(":")):
+            if source in (
+                compat.b("try:"),
+                compat.b("else:"),
+            ) or source.startswith(compat.b("except")):
+                source = compat.b("")  # Ignore try/except and else
+            elif source.startswith(compat.b("elif")):
+                source = source[2:]  # Replace "elif" with "if"
+            source += compat.b("pass")
+        code = io.BytesIO(source)
         for msg in self.python_extractor(
-                self.filename, self.options, code, code_lineno):
+            self.filename, self.options, code, code_lineno - 1
+        ):
             if translator_strings:
-                msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
-                              msg.flags,
-                              compat.u(' ').join(
-                                  translator_strings + [msg.comment]),
-                              msg.tcomment, msg.location)
+                msg = Message(
+                    msg.msgctxt,
+                    msg.msgid,
+                    msg.msgid_plural,
+                    msg.flags,
+                    compat.u(" ").join(translator_strings + [msg.comment]),
+                    msg.tcomment,
+                    msg.location,
+                )
             yield msg
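The new branch above stubs out bare control lines before handing them to the Python extractor. A small stdlib-only illustration of why a trailing "pass" is needed (the sample line is made up):

    import ast

    source = "if _('Hi'):"
    try:
        ast.parse(source)
    except SyntaxError:
        print("a bare control line is not valid Python on its own")
    ast.parse(source + " pass")  # parses once the body is stubbed out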
ext/preprocessors.py

@@ -1,20 +1,20 @@
 # ext/preprocessors.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-"""preprocessing functions, used with the 'preprocessor'
+"""preprocessing functions, used with the 'preprocessor'
 argument on Template, TemplateLookup"""
 
 import re
 
 
 def convert_comments(text):
     """preprocess old style comments.
 
     example:
 
     from mako.ext.preprocessors import convert_comments
     t = Template(..., preprocessor=convert_comments)"""
-    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
+
+    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
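A short usage sketch for convert_comments() above; the template text is illustrative:

    from mako.ext.preprocessors import convert_comments
    from mako.template import Template

    # Old-style "# comment" lines are rewritten to Mako "##" comments
    # before the template is compiled.
    source = "hello\n# an old style comment\nworld\n"
    t = Template(source, preprocessor=convert_comments)
    print(t.render())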
ext/pygmentplugin.py

@@ -1,44 +1,73 @@
 # ext/pygmentplugin.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from pygments.lexers.web import \
-    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
-from pygments.lexers.agile import PythonLexer, Python3Lexer
-from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
-    include, using
-from pygments.token import \
-    Text, Comment, Operator, Keyword, Name, String, Other
-from pygments.formatters.html import HtmlFormatter
 from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
 from mako import compat
 
 
 class MakoLexer(RegexLexer):
-    name = 'Mako'
-    aliases = ['mako']
-    filenames = ['*.mao']
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
 
     tokens = {
-        'root': [
-            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Keyword, Other)),
-            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
-            (r'(\s*)(##[^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Other)),
-            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
-            (r'(<%)([\w\.\:]+)',
-             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
-             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
-            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(<%(?:!?))(.*?)(%>)(?s)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'''(?sx)
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(?s)(<%(?:!?))(.*?)(%>)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"""(?sx)
             (.+?)                # anything, followed by:
             (?:
              (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
@@ -51,72 +80,78 @@ class MakoLexer(RegexLexer):
             (\\\n) |                  # an escaped newline
             \Z                        # end of string
             )
-            ''', bygroups(Other, Operator)),
-            (r'\s+', Text),
+            """,
+                bygroups(Other, Operator),
+            ),
+            (r"\s+", Text),
         ],
-        'ondeftags': [
-            (r'<%', Comment.Preproc),
-            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
-            include('tag'),
+        "ondeftags": [
+            (r"<%", Comment.Preproc),
+            (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
+            include("tag"),
         ],
-        'tag': [
-            (r'((?:\w+)\s*=)\s*(".*?")',
-             bygroups(Name.Attribute, String)),
-            (r'/?\s*>', Comment.Preproc, '#pop'),
-            (r'\s+', Text),
+        "tag": [
+            (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
+            (r"/?\s*>", Comment.Preproc, "#pop"),
+            (r"\s+", Text),
         ],
-        'attr': [
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
+        "attr": [
+            ('".*?"', String, "#pop"),
+            ("'.*?'", String, "#pop"),
+            (r"[^\s>]+", String, "#pop"),
         ],
     }
 
 
 class MakoHtmlLexer(DelegatingLexer):
-    name = 'HTML+Mako'
-    aliases = ['html+mako']
+    name = "HTML+Mako"
+    aliases = ["html+mako"]
 
     def __init__(self, **options):
-        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
-                                            **options)
+        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
 
 
 class MakoXmlLexer(DelegatingLexer):
-    name = 'XML+Mako'
-    aliases = ['xml+mako']
+    name = "XML+Mako"
+    aliases = ["xml+mako"]
 
     def __init__(self, **options):
-        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
-                                           **options)
+        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
 
 
 class MakoJavascriptLexer(DelegatingLexer):
-    name = 'JavaScript+Mako'
-    aliases = ['js+mako', 'javascript+mako']
+    name = "JavaScript+Mako"
+    aliases = ["js+mako", "javascript+mako"]
 
     def __init__(self, **options):
-        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
-                                                  MakoLexer, **options)
+        super(MakoJavascriptLexer, self).__init__(
+            JavascriptLexer, MakoLexer, **options
+        )
 
 
 class MakoCssLexer(DelegatingLexer):
-    name = 'CSS+Mako'
-    aliases = ['css+mako']
+    name = "CSS+Mako"
+    aliases = ["css+mako"]
 
     def __init__(self, **options):
-        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
-                                           **options)
+        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
 
 
-pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
-                                        linenos=True)
-def syntax_highlight(filename='', language=None):
+pygments_html_formatter = HtmlFormatter(
+    cssclass="syntax-highlighted", linenos=True
+)
+
+
+def syntax_highlight(filename="", language=None):
     mako_lexer = MakoLexer()
     if compat.py3k:
         python_lexer = Python3Lexer()
     else:
         python_lexer = PythonLexer()
-    if filename.startswith('memory:') or language == 'mako':
-        return lambda string: highlight(string, mako_lexer,
-                                        pygments_html_formatter)
-    return lambda string: highlight(string, python_lexer,
-                                    pygments_html_formatter)
+    if filename.startswith("memory:") or language == "mako":
+        return lambda string: highlight(
+            string, mako_lexer, pygments_html_formatter
+        )
+    return lambda string: highlight(
+        string, python_lexer, pygments_html_formatter
+    )
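syntax_highlight() above returns a callable bound to the module-level HTML formatter. A hedged usage sketch (requires Pygments; the sample sources are made up):

    from mako.ext.pygmentplugin import syntax_highlight

    # Mako source is selected via language="mako" (or a "memory:" filename).
    to_html = syntax_highlight(language="mako")
    print(to_html("<% x = 1 %>\nhello ${x}"))

    # Anything else falls back to the Python lexer.
    print(syntax_highlight("example.py")("def add(a, b):\n    return a + b"))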
ext/turbogears.py

@@ -1,18 +1,19 @@
 # ext/turbogears.py
-# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
+# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
 #
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import inspect
 from mako import compat
 from mako.lookup import TemplateLookup
 from mako.template import Template
 
 
 class TGPlugin(object):
+
     """TurboGears compatible Template Plugin."""
 
-    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
+    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
         self.extra_vars_func = extra_vars_func
         self.extension = extension
         if not options:
@@ -21,16 +22,16 @@ class TGPlugin(object):
         # Pull the options out and initialize the lookup
         lookup_options = {}
         for k, v in options.items():
-            if k.startswith('mako.'):
+            if k.startswith("mako."):
                 lookup_options[k[5:]] = v
-            elif k in ['directories', 'filesystem_checks', 'module_directory']:
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
                 lookup_options[k] = v
         self.lookup = TemplateLookup(**lookup_options)
 
         self.tmpl_options = {}
         # transfer lookup args to template args, based on those available
         # in getargspec
-        for kw in inspect.getargspec(Template.__init__)[0]:
+        for kw in compat.inspect_getargspec(Template.__init__)[0]:
             if kw in lookup_options:
                 self.tmpl_options[kw] = lookup_options[kw]
 
@@ -39,14 +40,17 @@ class TGPlugin(object):
         if template_string is not None:
             return Template(template_string, **self.tmpl_options)
         # Translate TG dot notation to normal / template path
-        if '/' not in templatename:
-            templatename = '/' + templatename.replace('.', '/') + '.' +\
-                self.extension
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
 
         # Lookup template
         return self.lookup.get_template(templatename)
 
-    def render(self, info, format="html", fragment=False, template=None):
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
         if isinstance(template, compat.string_types):
             template = self.load_template(template)
 
@@ -55,4 +59,3 @@ class TGPlugin(object):
             info.update(self.extra_vars_func())
 
         return template.render(**info)
-
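A rough sketch of the dot-notation lookup that load_template() and render() perform above; the directory layout and template name are assumptions, not part of the commit:

    from mako.ext.turbogears import TGPlugin

    # "site.index" is translated to "/site/index.mak" and resolved against the
    # configured lookup directories; assumes templates/site/index.mak exists.
    plugin = TGPlugin(options={"directories": ["templates"]})
    print(plugin.render({"name": "world"}, template="site.index"))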