Update mako to 1.1.0

JonnyWong16 2019-11-23 18:57:21 -08:00
parent 84ce4758d1
commit f2d7beec90
27 changed files with 2424 additions and 1890 deletions


@ -1,20 +1,19 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>.
Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>.
Mako is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@ -42,7 +42,7 @@ Python is a great scripting language. Don't reinvent the wheel...your templates
Documentation
==============
See documentation for Mako at http://www.makotemplates.org/docs/
See documentation for Mako at https://docs.makotemplates.org/en/latest/
License
========


@ -1,8 +1,8 @@
# mako/__init__.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__version__ = '1.0.1'
__version__ = "1.1.0"


@ -1,5 +1,5 @@
# mako/_ast_util.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -8,69 +8,77 @@
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
This is a stripped down version of Armin Ronacher's ast module.
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import *
from _ast import Add
from _ast import And
from _ast import AST
from _ast import BitAnd
from _ast import BitOr
from _ast import BitXor
from _ast import Div
from _ast import Eq
from _ast import FloorDiv
from _ast import Gt
from _ast import GtE
from _ast import If
from _ast import In
from _ast import Invert
from _ast import Is
from _ast import IsNot
from _ast import LShift
from _ast import Lt
from _ast import LtE
from _ast import Mod
from _ast import Mult
from _ast import Name
from _ast import Not
from _ast import NotEq
from _ast import NotIn
from _ast import Or
from _ast import PyCF_ONLY_AST
from _ast import RShift
from _ast import Sub
from _ast import UAdd
from _ast import USub
from mako.compat import arg_stringname
BOOLOP_SYMBOLS = {
And: 'and',
Or: 'or'
}
BOOLOP_SYMBOLS = {And: "and", Or: "or"}
BINOP_SYMBOLS = {
Add: '+',
Sub: '-',
Mult: '*',
Div: '/',
FloorDiv: '//',
Mod: '%',
LShift: '<<',
RShift: '>>',
BitOr: '|',
BitAnd: '&',
BitXor: '^'
Add: "+",
Sub: "-",
Mult: "*",
Div: "/",
FloorDiv: "//",
Mod: "%",
LShift: "<<",
RShift: ">>",
BitOr: "|",
BitAnd: "&",
BitXor: "^",
}
CMPOP_SYMBOLS = {
Eq: '==',
Gt: '>',
GtE: '>=',
In: 'in',
Is: 'is',
IsNot: 'is not',
Lt: '<',
LtE: '<=',
NotEq: '!=',
NotIn: 'not in'
Eq: "==",
Gt: ">",
GtE: ">=",
In: "in",
Is: "is",
IsNot: "is not",
Lt: "<",
LtE: "<=",
NotEq: "!=",
NotIn: "not in",
}
UNARYOP_SYMBOLS = {
Invert: '~',
Not: 'not',
UAdd: '+',
USub: '-'
}
UNARYOP_SYMBOLS = {Invert: "~", Not: "not", UAdd: "+", USub: "-"}
ALL_SYMBOLS = {}
ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
@ -79,105 +87,15 @@ ALL_SYMBOLS.update(CMPOP_SYMBOLS)
ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
def parse(expr, filename='<unknown>', mode='exec'):
def parse(expr, filename="<unknown>", mode="exec"):
"""Parse an expression into an AST node."""
return compile(expr, filename, mode, PyCF_ONLY_AST)
def to_source(node, indent_with=' ' * 4):
"""
This function can convert a node tree back into python sourcecode. This
is useful for debugging purposes, especially if you're dealing with custom
asts not generated by python itself.
It could be that the sourcecode is evaluable when the AST itself is not
compilable / evaluable. The reason for this is that the AST contains some
more data than regular sourcecode does, which is dropped during
conversion.
Each level of indentation is replaced with `indent_with`. Per default this
parameter is equal to four spaces as suggested by PEP 8, but it might be
adjusted to match the application's styleguide.
"""
generator = SourceGenerator(indent_with)
generator.visit(node)
return ''.join(generator.result)
def dump(node):
"""
A very verbose representation of the node passed. This is useful for
debugging purposes.
"""
def _format(node):
if isinstance(node, AST):
return '%s(%s)' % (node.__class__.__name__,
', '.join('%s=%s' % (a, _format(b))
for a, b in iter_fields(node)))
elif isinstance(node, list):
return '[%s]' % ', '.join(_format(x) for x in node)
return repr(node)
if not isinstance(node, AST):
raise TypeError('expected AST, got %r' % node.__class__.__name__)
return _format(node)
def copy_location(new_node, old_node):
"""
Copy the source location hint (`lineno` and `col_offset`) from the
old to the new node if possible and return the new one.
"""
for attr in 'lineno', 'col_offset':
if attr in old_node._attributes and attr in new_node._attributes \
and hasattr(old_node, attr):
setattr(new_node, attr, getattr(old_node, attr))
return new_node
def fix_missing_locations(node):
"""
Some nodes require a line number and the column offset. Without that
information the compiler will abort the compilation. Because it can be
a dull task to add appropriate line numbers and column offsets when
adding new nodes this function can help. It copies the line number and
column offset of the parent node to the child nodes without this
information.
Unlike `copy_location` this works recursive and won't touch nodes that
already have a location information.
"""
def _fix(node, lineno, col_offset):
if 'lineno' in node._attributes:
if not hasattr(node, 'lineno'):
node.lineno = lineno
else:
lineno = node.lineno
if 'col_offset' in node._attributes:
if not hasattr(node, 'col_offset'):
node.col_offset = col_offset
else:
col_offset = node.col_offset
for child in iter_child_nodes(node):
_fix(child, lineno, col_offset)
_fix(node, 1, 0)
return node
def increment_lineno(node, n=1):
"""
Increment the line numbers of all nodes by `n` if they have line number
attributes. This is useful to "move code" to a different location in a
file.
"""
for node in zip((node,), walk(node)):
if 'lineno' in node._attributes:
node.lineno = getattr(node, 'lineno', 0) + n
def iter_fields(node):
"""Iterate over all fields of a node, only yielding existing fields."""
# CPython 2.5 compat
if not hasattr(node, '_fields') or not node._fields:
if not hasattr(node, "_fields") or not node._fields:
return
for field in node._fields:
try:
@ -186,66 +104,8 @@ def iter_fields(node):
pass
def get_fields(node):
"""Like `iter_fiels` but returns a dict."""
return dict(iter_fields(node))
def iter_child_nodes(node):
"""Iterate over all child nodes or a node."""
for name, field in iter_fields(node):
if isinstance(field, AST):
yield field
elif isinstance(field, list):
for item in field:
if isinstance(item, AST):
yield item
def get_child_nodes(node):
"""Like `iter_child_nodes` but returns a list."""
return list(iter_child_nodes(node))
def get_compile_mode(node):
"""
Get the mode for `compile` of a given node. If the node is not a `mod`
node (`Expression`, `Module` etc.) a `TypeError` is thrown.
"""
if not isinstance(node, mod):
raise TypeError('expected mod node, got %r' % node.__class__.__name__)
return {
Expression: 'eval',
Interactive: 'single'
}.get(node.__class__, 'expr')
def get_docstring(node):
"""
Return the docstring for the given node or `None` if no docstring can be
found. If the node provided does not accept docstrings a `TypeError`
will be raised.
"""
if not isinstance(node, (FunctionDef, ClassDef, Module)):
raise TypeError("%r can't have docstrings" % node.__class__.__name__)
if node.body and isinstance(node.body[0], Str):
return node.body[0].s
def walk(node):
"""
Iterate over all nodes. This is useful if you only want to modify nodes in
place and don't care about the context or the order the nodes are returned.
"""
from collections import deque
todo = deque([node])
while todo:
node = todo.popleft()
todo.extend(iter_child_nodes(node))
yield node
class NodeVisitor(object):
"""
Walks the abstract syntax tree and call visitor functions for every node
found. The visitor functions may return values which will be forwarded
@ -268,7 +128,7 @@ class NodeVisitor(object):
exists for this node. In that case the generic visit function is
used instead.
"""
method = 'visit_' + node.__class__.__name__
method = "visit_" + node.__class__.__name__
return getattr(self, method, None)
def visit(self, node):
@ -290,6 +150,7 @@ class NodeVisitor(object):
class NodeTransformer(NodeVisitor):
"""
Walks the abstract syntax tree and allows modifications of nodes.
@ -349,6 +210,7 @@ class NodeTransformer(NodeVisitor):
class SourceGenerator(NodeVisitor):
"""
This visitor is able to transform a well formed syntax tree into python
sourcecode. For more details have a look at the docstring of the
@ -364,7 +226,7 @@ class SourceGenerator(NodeVisitor):
def write(self, x):
if self.new_lines:
if self.result:
self.result.append('\n' * self.new_lines)
self.result.append("\n" * self.new_lines)
self.result.append(self.indent_with * self.indentation)
self.new_lines = 0
self.result.append(x)
@ -383,14 +245,15 @@ class SourceGenerator(NodeVisitor):
self.body(node.body)
if node.orelse:
self.newline()
self.write('else:')
self.write("else:")
self.body(node.orelse)
def signature(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(', ')
self.write(", ")
else:
want_comma.append(True)
@ -399,19 +262,19 @@ class SourceGenerator(NodeVisitor):
write_comma()
self.visit(arg)
if default is not None:
self.write('=')
self.write("=")
self.visit(default)
if node.vararg is not None:
write_comma()
self.write('*' + arg_stringname(node.vararg))
self.write("*" + arg_stringname(node.vararg))
if node.kwarg is not None:
write_comma()
self.write('**' + arg_stringname(node.kwarg))
self.write("**" + arg_stringname(node.kwarg))
def decorators(self, node):
for decorator in node.decorator_list:
self.newline()
self.write('@')
self.write("@")
self.visit(decorator)
# Statements
@ -420,29 +283,29 @@ class SourceGenerator(NodeVisitor):
self.newline()
for idx, target in enumerate(node.targets):
if idx:
self.write(', ')
self.write(", ")
self.visit(target)
self.write(' = ')
self.write(" = ")
self.visit(node.value)
def visit_AugAssign(self, node):
self.newline()
self.visit(node.target)
self.write(BINOP_SYMBOLS[type(node.op)] + '=')
self.write(BINOP_SYMBOLS[type(node.op)] + "=")
self.visit(node.value)
def visit_ImportFrom(self, node):
self.newline()
self.write('from %s%s import ' % ('.' * node.level, node.module))
self.write("from %s%s import " % ("." * node.level, node.module))
for idx, item in enumerate(node.names):
if idx:
self.write(', ')
self.write(", ")
self.write(item)
def visit_Import(self, node):
self.newline()
for item in node.names:
self.write('import ')
self.write("import ")
self.visit(item)
def visit_Expr(self, node):
@ -453,208 +316,210 @@ class SourceGenerator(NodeVisitor):
self.newline(n=2)
self.decorators(node)
self.newline()
self.write('def %s(' % node.name)
self.write("def %s(" % node.name)
self.signature(node.args)
self.write('):')
self.write("):")
self.body(node.body)
def visit_ClassDef(self, node):
have_args = []
def paren_or_comma():
if have_args:
self.write(', ')
self.write(", ")
else:
have_args.append(True)
self.write('(')
self.write("(")
self.newline(n=3)
self.decorators(node)
self.newline()
self.write('class %s' % node.name)
self.write("class %s" % node.name)
for base in node.bases:
paren_or_comma()
self.visit(base)
# XXX: the if here is used to keep this module compatible
# with python 2.6.
if hasattr(node, 'keywords'):
if hasattr(node, "keywords"):
for keyword in node.keywords:
paren_or_comma()
self.write(keyword.arg + '=')
self.write(keyword.arg + "=")
self.visit(keyword.value)
if node.starargs is not None:
if getattr(node, "starargs", None):
paren_or_comma()
self.write('*')
self.write("*")
self.visit(node.starargs)
if node.kwargs is not None:
if getattr(node, "kwargs", None):
paren_or_comma()
self.write('**')
self.write("**")
self.visit(node.kwargs)
self.write(have_args and '):' or ':')
self.write(have_args and "):" or ":")
self.body(node.body)
def visit_If(self, node):
self.newline()
self.write('if ')
self.write("if ")
self.visit(node.test)
self.write(':')
self.write(":")
self.body(node.body)
while True:
else_ = node.orelse
if len(else_) == 1 and isinstance(else_[0], If):
node = else_[0]
self.newline()
self.write('elif ')
self.write("elif ")
self.visit(node.test)
self.write(':')
self.write(":")
self.body(node.body)
else:
self.newline()
self.write('else:')
self.write("else:")
self.body(else_)
break
def visit_For(self, node):
self.newline()
self.write('for ')
self.write("for ")
self.visit(node.target)
self.write(' in ')
self.write(" in ")
self.visit(node.iter)
self.write(':')
self.write(":")
self.body_or_else(node)
def visit_While(self, node):
self.newline()
self.write('while ')
self.write("while ")
self.visit(node.test)
self.write(':')
self.write(":")
self.body_or_else(node)
def visit_With(self, node):
self.newline()
self.write('with ')
self.write("with ")
self.visit(node.context_expr)
if node.optional_vars is not None:
self.write(' as ')
self.write(" as ")
self.visit(node.optional_vars)
self.write(':')
self.write(":")
self.body(node.body)
def visit_Pass(self, node):
self.newline()
self.write('pass')
self.write("pass")
def visit_Print(self, node):
# XXX: python 2.6 only
self.newline()
self.write('print ')
self.write("print ")
want_comma = False
if node.dest is not None:
self.write(' >> ')
self.write(" >> ")
self.visit(node.dest)
want_comma = True
for value in node.values:
if want_comma:
self.write(', ')
self.write(", ")
self.visit(value)
want_comma = True
if not node.nl:
self.write(',')
self.write(",")
def visit_Delete(self, node):
self.newline()
self.write('del ')
self.write("del ")
for idx, target in enumerate(node):
if idx:
self.write(', ')
self.write(", ")
self.visit(target)
def visit_TryExcept(self, node):
self.newline()
self.write('try:')
self.write("try:")
self.body(node.body)
for handler in node.handlers:
self.visit(handler)
def visit_TryFinally(self, node):
self.newline()
self.write('try:')
self.write("try:")
self.body(node.body)
self.newline()
self.write('finally:')
self.write("finally:")
self.body(node.finalbody)
def visit_Global(self, node):
self.newline()
self.write('global ' + ', '.join(node.names))
self.write("global " + ", ".join(node.names))
def visit_Nonlocal(self, node):
self.newline()
self.write('nonlocal ' + ', '.join(node.names))
self.write("nonlocal " + ", ".join(node.names))
def visit_Return(self, node):
self.newline()
self.write('return ')
self.write("return ")
self.visit(node.value)
def visit_Break(self, node):
self.newline()
self.write('break')
self.write("break")
def visit_Continue(self, node):
self.newline()
self.write('continue')
self.write("continue")
def visit_Raise(self, node):
# XXX: Python 2.6 / 3.0 compatibility
self.newline()
self.write('raise')
if hasattr(node, 'exc') and node.exc is not None:
self.write(' ')
self.write("raise")
if hasattr(node, "exc") and node.exc is not None:
self.write(" ")
self.visit(node.exc)
if node.cause is not None:
self.write(' from ')
self.write(" from ")
self.visit(node.cause)
elif hasattr(node, 'type') and node.type is not None:
elif hasattr(node, "type") and node.type is not None:
self.visit(node.type)
if node.inst is not None:
self.write(', ')
self.write(", ")
self.visit(node.inst)
if node.tback is not None:
self.write(', ')
self.write(", ")
self.visit(node.tback)
# Expressions
def visit_Attribute(self, node):
self.visit(node.value)
self.write('.' + node.attr)
self.write("." + node.attr)
def visit_Call(self, node):
want_comma = []
def write_comma():
if want_comma:
self.write(', ')
self.write(", ")
else:
want_comma.append(True)
self.visit(node.func)
self.write('(')
self.write("(")
for arg in node.args:
write_comma()
self.visit(arg)
for keyword in node.keywords:
write_comma()
self.write(keyword.arg + '=')
self.write(keyword.arg + "=")
self.visit(keyword.value)
if node.starargs is not None:
if getattr(node, "starargs", None):
write_comma()
self.write('*')
self.write("*")
self.visit(node.starargs)
if node.kwargs is not None:
if getattr(node, "kwargs", None):
write_comma()
self.write('**')
self.write("**")
self.visit(node.kwargs)
self.write(')')
self.write(")")
def visit_Name(self, node):
self.write(node.id)
@ -674,106 +539,111 @@ class SourceGenerator(NodeVisitor):
def visit_Num(self, node):
self.write(repr(node.n))
# newly needed in Python 3.8
def visit_Constant(self, node):
self.write(repr(node.value))
def visit_Tuple(self, node):
self.write('(')
self.write("(")
idx = -1
for idx, item in enumerate(node.elts):
if idx:
self.write(', ')
self.write(", ")
self.visit(item)
self.write(idx and ')' or ',)')
self.write(idx and ")" or ",)")
def sequence_visit(left, right):
def visit(self, node):
self.write(left)
for idx, item in enumerate(node.elts):
if idx:
self.write(', ')
self.write(", ")
self.visit(item)
self.write(right)
return visit
visit_List = sequence_visit('[', ']')
visit_Set = sequence_visit('{', '}')
visit_List = sequence_visit("[", "]")
visit_Set = sequence_visit("{", "}")
del sequence_visit
def visit_Dict(self, node):
self.write('{')
self.write("{")
for idx, (key, value) in enumerate(zip(node.keys, node.values)):
if idx:
self.write(', ')
self.write(", ")
self.visit(key)
self.write(': ')
self.write(": ")
self.visit(value)
self.write('}')
self.write("}")
def visit_BinOp(self, node):
self.write('(')
self.write("(")
self.visit(node.left)
self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
self.write(" %s " % BINOP_SYMBOLS[type(node.op)])
self.visit(node.right)
self.write(')')
self.write(")")
def visit_BoolOp(self, node):
self.write('(')
self.write("(")
for idx, value in enumerate(node.values):
if idx:
self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
self.write(" %s " % BOOLOP_SYMBOLS[type(node.op)])
self.visit(value)
self.write(')')
self.write(")")
def visit_Compare(self, node):
self.write('(')
self.write("(")
self.visit(node.left)
for op, right in zip(node.ops, node.comparators):
self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
self.write(" %s " % CMPOP_SYMBOLS[type(op)])
self.visit(right)
self.write(')')
self.write(")")
def visit_UnaryOp(self, node):
self.write('(')
self.write("(")
op = UNARYOP_SYMBOLS[type(node.op)]
self.write(op)
if op == 'not':
self.write(' ')
if op == "not":
self.write(" ")
self.visit(node.operand)
self.write(')')
self.write(")")
def visit_Subscript(self, node):
self.visit(node.value)
self.write('[')
self.write("[")
self.visit(node.slice)
self.write(']')
self.write("]")
def visit_Slice(self, node):
if node.lower is not None:
self.visit(node.lower)
self.write(':')
self.write(":")
if node.upper is not None:
self.visit(node.upper)
if node.step is not None:
self.write(':')
if not (isinstance(node.step, Name) and node.step.id == 'None'):
self.write(":")
if not (isinstance(node.step, Name) and node.step.id == "None"):
self.visit(node.step)
def visit_ExtSlice(self, node):
for idx, item in node.dims:
if idx:
self.write(', ')
self.write(", ")
self.visit(item)
def visit_Yield(self, node):
self.write('yield ')
self.write("yield ")
self.visit(node.value)
def visit_Lambda(self, node):
self.write('lambda ')
self.write("lambda ")
self.signature(node.args)
self.write(': ')
self.write(": ")
self.visit(node.body)
def visit_Ellipsis(self, node):
self.write('Ellipsis')
self.write("Ellipsis")
def generator_visit(left, right):
def visit(self, node):
@ -782,64 +652,65 @@ class SourceGenerator(NodeVisitor):
for comprehension in node.generators:
self.visit(comprehension)
self.write(right)
return visit
visit_ListComp = generator_visit('[', ']')
visit_GeneratorExp = generator_visit('(', ')')
visit_SetComp = generator_visit('{', '}')
visit_ListComp = generator_visit("[", "]")
visit_GeneratorExp = generator_visit("(", ")")
visit_SetComp = generator_visit("{", "}")
del generator_visit
def visit_DictComp(self, node):
self.write('{')
self.write("{")
self.visit(node.key)
self.write(': ')
self.write(": ")
self.visit(node.value)
for comprehension in node.generators:
self.visit(comprehension)
self.write('}')
self.write("}")
def visit_IfExp(self, node):
self.visit(node.body)
self.write(' if ')
self.write(" if ")
self.visit(node.test)
self.write(' else ')
self.write(" else ")
self.visit(node.orelse)
def visit_Starred(self, node):
self.write('*')
self.write("*")
self.visit(node.value)
def visit_Repr(self, node):
# XXX: python 2.6 only
self.write('`')
self.write("`")
self.visit(node.value)
self.write('`')
self.write("`")
# Helper Nodes
def visit_alias(self, node):
self.write(node.name)
if node.asname is not None:
self.write(' as ' + node.asname)
self.write(" as " + node.asname)
def visit_comprehension(self, node):
self.write(' for ')
self.write(" for ")
self.visit(node.target)
self.write(' in ')
self.write(" in ")
self.visit(node.iter)
if node.ifs:
for if_ in node.ifs:
self.write(' if ')
self.write(" if ")
self.visit(if_)
def visit_excepthandler(self, node):
self.newline()
self.write('except')
self.write("except")
if node.type is not None:
self.write(' ')
self.write(" ")
self.visit(node.type)
if node.name is not None:
self.write(' as ')
self.write(" as ")
self.visit(node.name)
self.write(':')
self.write(":")
self.body(node.body)
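
_ast_util is an internal helper module, but a short sketch shows what SourceGenerator produces with the double-quoted style used above; on Python 3.8+ the literal below parses as a Constant node, which is what the newly added visit_Constant handles:

from mako import _ast_util

# parse an expression and regenerate source text from its AST
node = _ast_util.parse("a + 1", mode="eval").body   # the BinOp inside the Expression node
gen = _ast_util.SourceGenerator(" " * 4)
gen.visit(node)
print("".join(gen.result))   # "(a + 1)" -- binary ops are re-emitted fully parenthesized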


@ -1,5 +1,5 @@
# mako/ast.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -7,11 +7,17 @@
"""utilities for analyzing expressions and blocks of Python
code, as well as generating Python from AST nodes"""
from mako import exceptions, pyparser, compat
import re
from mako import compat
from mako import exceptions
from mako import pyparser
class PythonCode(object):
"""represents information about a string containing Python code"""
def __init__(self, code, **exception_kwargs):
self.code = code
@ -41,8 +47,11 @@ class PythonCode(object):
f = pyparser.FindIdentifiers(self, **exception_kwargs)
f.visit(expr)
class ArgumentList(object):
"""parses a fragment of code as a comma-separated list of expressions"""
def __init__(self, code, **exception_kwargs):
self.codeargs = []
self.args = []
@ -60,58 +69,69 @@ class ArgumentList(object):
f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
f.visit(expr)
class PythonFragment(PythonCode):
"""extends PythonCode to provide identifier lookups in partial control
statements
e.g.
e.g.::
for x in 5:
elif y==9:
except (MyException, e):
etc.
"""
def __init__(self, code, **exception_kwargs):
m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
if not m:
raise exceptions.CompileException(
"Fragment '%s' is not a partial control statement" %
code, **exception_kwargs)
"Fragment '%s' is not a partial control statement" % code,
**exception_kwargs
)
if m.group(3):
code = code[:m.start(3)]
(keyword, expr) = m.group(1,2)
if keyword in ['for','if', 'while']:
code = code[: m.start(3)]
(keyword, expr) = m.group(1, 2)
if keyword in ["for", "if", "while"]:
code = code + "pass"
elif keyword == 'try':
elif keyword == "try":
code = code + "pass\nexcept:pass"
elif keyword == 'elif' or keyword == 'else':
elif keyword == "elif" or keyword == "else":
code = "if False:pass\n" + code + "pass"
elif keyword == 'except':
elif keyword == "except":
code = "try:pass\n" + code + "pass"
elif keyword == 'with':
elif keyword == "with":
code = code + "pass"
else:
raise exceptions.CompileException(
"Unsupported control keyword: '%s'" %
keyword, **exception_kwargs)
"Unsupported control keyword: '%s'" % keyword,
**exception_kwargs
)
super(PythonFragment, self).__init__(code, **exception_kwargs)
class FunctionDecl(object):
"""function declaration"""
def __init__(self, code, allow_kwargs=True, **exception_kwargs):
self.code = code
expr = pyparser.parse(code, "exec", **exception_kwargs)
f = pyparser.ParseFunc(self, **exception_kwargs)
f.visit(expr)
if not hasattr(self, 'funcname'):
if not hasattr(self, "funcname"):
raise exceptions.CompileException(
"Code '%s' is not a function declaration" % code,
**exception_kwargs)
**exception_kwargs
)
if not allow_kwargs and self.kwargs:
raise exceptions.CompileException(
"'**%s' keyword argument not allowed here" %
self.kwargnames[-1], **exception_kwargs)
"'**%s' keyword argument not allowed here"
% self.kwargnames[-1],
**exception_kwargs
)
def get_argument_expressions(self, as_call=False):
"""Return the argument declarations of this FunctionDecl as a printable
@ -146,8 +166,10 @@ class FunctionDecl(object):
# `def foo(*, a=1, b, c=3)`
namedecls.append(name)
else:
namedecls.append("%s=%s" % (
name, pyparser.ExpressionGenerator(default).value()))
namedecls.append(
"%s=%s"
% (name, pyparser.ExpressionGenerator(default).value())
)
else:
namedecls.append(name)
@ -160,8 +182,10 @@ class FunctionDecl(object):
namedecls.append(name)
else:
default = defaults.pop(0)
namedecls.append("%s=%s" % (
name, pyparser.ExpressionGenerator(default).value()))
namedecls.append(
"%s=%s"
% (name, pyparser.ExpressionGenerator(default).value())
)
namedecls.reverse()
return namedecls
@ -170,9 +194,12 @@ class FunctionDecl(object):
def allargnames(self):
return tuple(self.argnames) + tuple(self.kwargnames)
class FunctionArgs(FunctionDecl):
"""the argument portion of a function declaration"""
def __init__(self, code, **kwargs):
super(FunctionArgs, self).__init__("def ANON(%s):pass" % code,
**kwargs)
super(FunctionArgs, self).__init__(
"def ANON(%s):pass" % code, **kwargs
)
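
For orientation, a small sketch of how these helpers are used; mako.ast is internal API, so treat this as illustrative rather than contractual:

from mako import ast as mako_ast

# PythonFragment completes a partial control line so it can be analyzed
frag = mako_ast.PythonFragment("for item in collection:")
print(frag.declared_identifiers)     # {'item'}
print(frag.undeclared_identifiers)   # {'collection'}

# FunctionArgs parses the argument portion of a <%def> signature
args = mako_ast.FunctionArgs("x, y=7, **kw")
print(args.allargnames)              # ('x', 'y', 'kw')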


@ -1,10 +1,11 @@
# mako/cache.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from mako import compat, util
from mako import compat
from mako import util
_cache_plugins = util.PluginLoader("mako.cache")
@ -13,6 +14,7 @@ register_plugin("beaker", "mako.ext.beaker_cache", "BeakerCacheImpl")
class Cache(object):
"""Represents a data content cache made available to the module
space of a specific :class:`.Template` object.
@ -89,12 +91,11 @@ class Cache(object):
return creation_function()
return self.impl.get_or_create(
key,
creation_function,
**self._get_cache_kw(kw, context))
key, creation_function, **self._get_cache_kw(kw, context)
)
def set(self, key, value, **kw):
"""Place a value in the cache.
r"""Place a value in the cache.
:param key: the value's key.
:param value: the value.
@ -112,7 +113,7 @@ class Cache(object):
"""
def get(self, key, **kw):
"""Retrieve a value from the cache.
r"""Retrieve a value from the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments. The
@ -124,7 +125,7 @@ class Cache(object):
return self.impl.get(key, **self._get_cache_kw(kw, None))
def invalidate(self, key, **kw):
"""Invalidate a value in the cache.
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments. The
@ -140,7 +141,7 @@ class Cache(object):
template.
"""
self.invalidate('render_body', __M_defname='render_body')
self.invalidate("render_body", __M_defname="render_body")
def invalidate_def(self, name):
"""Invalidate the cached content of a particular ``<%def>`` within this
@ -148,7 +149,7 @@ class Cache(object):
"""
self.invalidate('render_%s' % name, __M_defname='render_%s' % name)
self.invalidate("render_%s" % name, __M_defname="render_%s" % name)
def invalidate_closure(self, name):
"""Invalidate a nested ``<%def>`` within this template.
@ -164,7 +165,7 @@ class Cache(object):
self.invalidate(name, __M_defname=name)
def _get_cache_kw(self, kw, context):
defname = kw.pop('__M_defname', None)
defname = kw.pop("__M_defname", None)
if not defname:
tmpl_kw = self.template.cache_args.copy()
tmpl_kw.update(kw)
@ -176,11 +177,12 @@ class Cache(object):
self._def_regions[defname] = tmpl_kw
if context and self.impl.pass_context:
tmpl_kw = tmpl_kw.copy()
tmpl_kw.setdefault('context', context)
tmpl_kw.setdefault("context", context)
return tmpl_kw
class CacheImpl(object):
"""Provide a cache implementation for use by :class:`.Cache`."""
def __init__(self, cache):
@ -192,7 +194,7 @@ class CacheImpl(object):
"""
def get_or_create(self, key, creation_function, **kw):
"""Retrieve a value from the cache, using the given creation function
r"""Retrieve a value from the cache, using the given creation function
to generate a new value.
This function *must* return a value, either from
@ -210,7 +212,7 @@ class CacheImpl(object):
raise NotImplementedError()
def set(self, key, value, **kw):
"""Place a value in the cache.
r"""Place a value in the cache.
:param key: the value's key.
:param value: the value.
@ -220,7 +222,7 @@ class CacheImpl(object):
raise NotImplementedError()
def get(self, key, **kw):
"""Retrieve a value from the cache.
r"""Retrieve a value from the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.
@ -229,7 +231,7 @@ class CacheImpl(object):
raise NotImplementedError()
def invalidate(self, key, **kw):
"""Invalidate a value in the cache.
r"""Invalidate a value in the cache.
:param key: the value's key.
:param \**kw: cache configuration arguments.


@ -1,43 +1,66 @@
# mako/cmd.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from argparse import ArgumentParser
from os.path import isfile, dirname
from os.path import dirname
from os.path import isfile
import sys
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
from mako.lookup import TemplateLookup
from mako.template import Template
def varsplit(var):
if "=" not in var:
return (var, "")
return var.split("=", 1)
def _exit():
sys.stderr.write(exceptions.text_error_template().render())
sys.exit(1)
def cmdline(argv=None):
parser = ArgumentParser("usage: %prog [FILENAME]")
parser.add_argument("--var", default=[], action="append",
help="variable (can be used multiple times, use name=value)")
parser.add_argument("--template-dir", default=[], action="append",
parser = ArgumentParser()
parser.add_argument(
"--var",
default=[],
action="append",
help="variable (can be used multiple times, use name=value)",
)
parser.add_argument(
"--template-dir",
default=[],
action="append",
help="Directory to use for template lookup (multiple "
"directories may be provided). If not given then if the "
"template is read from stdin, the value defaults to be "
"the current directory, otherwise it defaults to be the "
"parent directory of the file provided.")
parser.add_argument('input', nargs='?', default='-')
"parent directory of the file provided.",
)
parser.add_argument(
"--output-encoding", default=None, help="force output encoding"
)
parser.add_argument("input", nargs="?", default="-")
options = parser.parse_args(argv)
if options.input == '-':
output_encoding = options.output_encoding
if options.input == "-":
lookup_dirs = options.template_dir or ["."]
lookup = TemplateLookup(lookup_dirs)
try:
template = Template(sys.stdin.read(), lookup=lookup)
template = Template(
sys.stdin.read(),
lookup=lookup,
output_encoding=output_encoding,
)
except:
_exit()
else:
@ -47,13 +70,17 @@ def cmdline(argv=None):
lookup_dirs = options.template_dir or [dirname(filename)]
lookup = TemplateLookup(lookup_dirs)
try:
template = Template(filename=filename, lookup=lookup)
template = Template(
filename=filename,
lookup=lookup,
output_encoding=output_encoding,
)
except:
_exit()
kw = dict([varsplit(var) for var in options.var])
try:
print(template.render(**kw))
sys.stdout.write(template.render(**kw))
except:
_exit()
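
Net effect of the cmd.py changes: a new --output-encoding flag, and rendered output now goes through sys.stdout.write() instead of print(), so it no longer gains a trailing newline. A minimal sketch of driving it programmatically, where hello.mako is a hypothetical template containing "hello, ${name}!":

from mako.cmd import cmdline

# roughly what the mako-render console script does
cmdline(["--var", "name=world", "hello.mako"])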

File diff suppressed because it is too large


@ -1,20 +1,61 @@
# mako/compat.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import collections
import inspect
import sys
import time
py3k = sys.version_info >= (3, 0)
py33 = sys.version_info >= (3, 3)
py2k = sys.version_info < (3,)
py26 = sys.version_info >= (2, 6)
jython = sys.platform.startswith('java')
win32 = sys.platform.startswith('win')
pypy = hasattr(sys, 'pypy_version_info')
py27 = sys.version_info >= (2, 7)
jython = sys.platform.startswith("java")
win32 = sys.platform.startswith("win")
pypy = hasattr(sys, "pypy_version_info")
ArgSpec = collections.namedtuple(
"ArgSpec", ["args", "varargs", "keywords", "defaults"]
)
def inspect_getargspec(func):
"""getargspec based on fully vendored getfullargspec from Python 3.3."""
if inspect.ismethod(func):
func = func.__func__
if not inspect.isfunction(func):
raise TypeError("{!r} is not a Python function".format(func))
co = func.__code__
if not inspect.iscode(co):
raise TypeError("{!r} is not a code object".format(co))
nargs = co.co_argcount
names = co.co_varnames
nkwargs = co.co_kwonlyargcount if py3k else 0
args = list(names[:nargs])
nargs += nkwargs
varargs = None
if co.co_flags & inspect.CO_VARARGS:
varargs = co.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if co.co_flags & inspect.CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
return ArgSpec(args, varargs, varkw, func.__defaults__)
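
A quick sanity check of the vendored helper above, which stands in for the long-deprecated inspect.getargspec:

from mako.compat import inspect_getargspec

def render_body(context, x, y=3, *args, **kwargs):
    pass

print(inspect_getargspec(render_body))
# ArgSpec(args=['context', 'x', 'y'], varargs='args', keywords='kwargs', defaults=(3,))
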
if py3k:
from io import StringIO
import builtins as compat_builtins
from urllib.parse import quote_plus, unquote_plus
from html.entities import codepoint2name, name2codepoint
string_types = str,
string_types = (str,)
binary_type = bytes
text_type = str
@ -29,8 +70,10 @@ if py3k:
def octal(lit):
return eval("0o" + lit)
else:
import __builtin__ as compat_builtins
import __builtin__ as compat_builtins # noqa
try:
from cStringIO import StringIO
except:
@ -38,14 +81,15 @@ else:
byte_buffer = StringIO
from urllib import quote_plus, unquote_plus
from htmlentitydefs import codepoint2name, name2codepoint
string_types = basestring,
from urllib import quote_plus, unquote_plus # noqa
from htmlentitydefs import codepoint2name, name2codepoint # noqa
string_types = (basestring,) # noqa
binary_type = str
text_type = unicode
text_type = unicode # noqa
def u(s):
return unicode(s, "utf-8")
return unicode(s, "utf-8") # noqa
def b(s):
return s
@ -54,14 +98,18 @@ else:
return eval("0" + lit)
if py33:
if py3k:
from importlib import machinery
def load_module(module_id, path):
return machinery.SourceFileLoader(module_id, path).load_module()
else:
import imp
def load_module(module_id, path):
fp = open(path, 'rb')
fp = open(path, "rb")
try:
return imp.load_source(module_id, path, fp)
finally:
@ -69,90 +117,32 @@ else:
if py3k:
def reraise(tp, value, tb=None, cause=None):
if cause is not None:
value.__cause__ = cause
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
exec("def reraise(tp, value, tb=None, cause=None):\n"
" raise tp, value, tb\n")
exec(
"def reraise(tp, value, tb=None, cause=None):\n"
" raise tp, value, tb\n"
)
def exception_as():
return sys.exc_info()[1]
try:
import threading
if py3k:
import _thread as thread
else:
import thread
except ImportError:
import dummy_threading as threading
if py3k:
import _dummy_thread as thread
else:
import dummy_thread as thread
if win32 or jython:
time_func = time.clock
else:
time_func = time.time
all = all # noqa
try:
from functools import partial
except:
def partial(func, *args, **keywords):
def newfunc(*fargs, **fkeywords):
newkeywords = keywords.copy()
newkeywords.update(fkeywords)
return func(*(args + fargs), **newkeywords)
return newfunc
all = all
import json
def exception_name(exc):
return exc.__class__.__name__
try:
from inspect import CO_VARKEYWORDS, CO_VARARGS
def inspect_func_args(fn):
if py3k:
co = fn.__code__
else:
co = fn.func_code
nargs = co.co_argcount
names = co.co_varnames
args = list(names[:nargs])
varargs = None
if co.co_flags & CO_VARARGS:
varargs = co.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if co.co_flags & CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
if py3k:
return args, varargs, varkw, fn.__defaults__
else:
return args, varargs, varkw, fn.func_defaults
except ImportError:
import inspect
def inspect_func_args(fn):
return inspect.getargspec(fn)
if py3k:
def callable(fn):
return hasattr(fn, '__call__')
else:
callable = callable
################################################
# cross-compatible metaclass implementation
@ -160,6 +150,8 @@ else:
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("%sBase" % meta.__name__, (base,), {})
################################################
@ -168,7 +160,7 @@ def arg_stringname(func_arg):
In Python3.4 a function's args are
of _ast.arg type not _ast.name
"""
if hasattr(func_arg, 'arg'):
if hasattr(func_arg, "arg"):
return func_arg.arg
else:
return str(func_arg)


@ -1,21 +1,26 @@
# mako/exceptions.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""exception classes"""
import traceback
import sys
from mako import util, compat
import traceback
from mako import compat
from mako import util
class MakoException(Exception):
pass
class RuntimeException(MakoException):
pass
def _format_filepos(lineno, pos, filename):
if filename is None:
return " at line: %d char: %d" % (lineno, pos)
@ -25,41 +30,53 @@ def _format_filepos(lineno, pos, filename):
class CompileException(MakoException):
def __init__(self, message, source, lineno, pos, filename):
MakoException.__init__(self,
message + _format_filepos(lineno, pos, filename))
MakoException.__init__(
self, message + _format_filepos(lineno, pos, filename)
)
self.lineno = lineno
self.pos = pos
self.filename = filename
self.source = source
class SyntaxException(MakoException):
def __init__(self, message, source, lineno, pos, filename):
MakoException.__init__(self,
message + _format_filepos(lineno, pos, filename))
MakoException.__init__(
self, message + _format_filepos(lineno, pos, filename)
)
self.lineno = lineno
self.pos = pos
self.filename = filename
self.source = source
class UnsupportedError(MakoException):
"""raised when a retired feature is used."""
class NameConflictError(MakoException):
"""raised when a reserved word is used inappropriately"""
class TemplateLookupException(MakoException):
pass
class TopLevelLookupException(TemplateLookupException):
pass
class RichTraceback(object):
"""Pull the current exception from the ``sys`` traceback and extracts
Mako-specific template information.
See the usage examples in :ref:`handling_exceptions`.
"""
def __init__(self, error=None, traceback=None):
self.source, self.lineno = "", 0
@ -98,7 +115,7 @@ class RichTraceback(object):
# str(Exception(u'\xe6')) work in Python < 2.6
self.message = self.error.args[0]
if not isinstance(self.message, compat.text_type):
self.message = compat.text_type(self.message, 'ascii', 'replace')
self.message = compat.text_type(self.message, "ascii", "replace")
def _get_reformatted_records(self, records):
for rec in records:
@ -134,25 +151,30 @@ class RichTraceback(object):
source, and code line from that line number of the template."""
import mako.template
mods = {}
rawrecords = traceback.extract_tb(trcback)
new_trcback = []
for filename, lineno, function, line in rawrecords:
if not line:
line = ''
line = ""
try:
(line_map, template_lines) = mods[filename]
(line_map, template_lines, template_filename) = mods[filename]
except KeyError:
try:
info = mako.template._get_module_info(filename)
module_source = info.code
template_source = info.source
template_filename = info.template_filename or filename
template_filename = (
info.template_filename
or info.template_uri
or filename
)
except KeyError:
# A normal .py file (not a Template)
if not compat.py3k:
try:
fp = open(filename, 'rb')
fp = open(filename, "rb")
encoding = util.parse_encoding(fp)
fp.close()
except IOError:
@ -160,21 +182,33 @@ class RichTraceback(object):
if encoding:
line = line.decode(encoding)
else:
line = line.decode('ascii', 'replace')
new_trcback.append((filename, lineno, function, line,
None, None, None, None))
line = line.decode("ascii", "replace")
new_trcback.append(
(
filename,
lineno,
function,
line,
None,
None,
None,
None,
)
)
continue
template_ln = 1
source_map = mako.template.ModuleInfo.\
get_module_source_metadata(
module_source, full_line_map=True)
line_map = source_map['full_line_map']
mtm = mako.template.ModuleInfo
source_map = mtm.get_module_source_metadata(
module_source, full_line_map=True
)
line_map = source_map["full_line_map"]
template_lines = [line for line in
template_source.split("\n")]
mods[filename] = (line_map, template_lines)
template_lines = [
line_ for line_ in template_source.split("\n")
]
mods[filename] = (line_map, template_lines, template_filename)
template_ln = line_map[lineno - 1]
@ -182,9 +216,18 @@ class RichTraceback(object):
template_line = template_lines[template_ln - 1]
else:
template_line = None
new_trcback.append((filename, lineno, function,
line, template_filename, template_ln,
template_line, template_source))
new_trcback.append(
(
filename,
lineno,
function,
line,
template_filename,
template_ln,
template_line,
template_source,
)
)
if not self.source:
for l in range(len(new_trcback) - 1, 0, -1):
if new_trcback[l][5]:
@ -195,15 +238,17 @@ class RichTraceback(object):
if new_trcback:
try:
# A normal .py file (not a Template)
fp = open(new_trcback[-1][0], 'rb')
fp = open(new_trcback[-1][0], "rb")
encoding = util.parse_encoding(fp)
if compat.py3k and not encoding:
encoding = "utf-8"
fp.seek(0)
self.source = fp.read()
fp.close()
if encoding:
self.source = self.source.decode(encoding)
except IOError:
self.source = ''
self.source = ""
self.lineno = new_trcback[-1][1]
return new_trcback
@ -216,7 +261,9 @@ def text_error_template(lookup=None):
"""
import mako.template
return mako.template.Template(r"""
return mako.template.Template(
r"""
<%page args="error=None, traceback=None"/>
<%!
from mako.exceptions import RichTraceback
@ -230,28 +277,36 @@ Traceback (most recent call last):
${line | trim}
% endfor
${tback.errorname}: ${tback.message}
""")
"""
)
def _install_pygments():
global syntax_highlight, pygments_html_formatter
from mako.ext.pygmentplugin import syntax_highlight,\
pygments_html_formatter
from mako.ext.pygmentplugin import syntax_highlight # noqa
from mako.ext.pygmentplugin import pygments_html_formatter # noqa
def _install_fallback():
global syntax_highlight, pygments_html_formatter
from mako.filters import html_escape
pygments_html_formatter = None
def syntax_highlight(filename='', language=None):
def syntax_highlight(filename="", language=None):
return html_escape
def _install_highlighting():
try:
_install_pygments()
except ImportError:
_install_fallback()
_install_highlighting()
def html_error_template():
"""Provides a template that renders a stack trace in an HTML format,
providing an excerpt of code as well as substituting source template
@ -266,7 +321,9 @@ def html_error_template():
"""
import mako.template
return mako.template.Template(r"""
return mako.template.Template(
r"""
<%!
from mako.exceptions import RichTraceback, syntax_highlight,\
pygments_html_formatter
@ -369,5 +426,7 @@ def html_error_template():
</body>
</html>
% endif
""", output_encoding=sys.getdefaultencoding(),
encoding_errors='htmlentityreplace')
""",
output_encoding=sys.getdefaultencoding(),
encoding_errors="htmlentityreplace",
)
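
The usual pattern for these error templates, sketched with a deliberately broken template: text_error_template() formats the current exception with template lines substituted back in, and html_error_template() is the HTML equivalent.

from mako.template import Template
from mako import exceptions

try:
    Template("hello ${wrong}").render()
except Exception:
    # plain-text traceback with template source lines substituted in
    print(exceptions.text_error_template().render())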


@ -1,5 +1,5 @@
# ext/autohandler.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -8,26 +8,29 @@
requires that the TemplateLookup class is used with templates.
usage:
usage::
<%!
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context)}"/>
%>
<%inherit file="${autohandler(template, context)}"/>
or with custom autohandler filename:
or with custom autohandler filename::
<%!
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
%>
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
"""
import posixpath, os, re
import os
import posixpath
import re
def autohandler(template, context, name='autohandler'):
def autohandler(template, context, name="autohandler"):
lookup = context.lookup
_template_uri = template.module._template_uri
if not lookup.filesystem_checks:
@ -36,13 +39,14 @@ def autohandler(template, context, name='autohandler'):
except KeyError:
pass
tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
while len(tokens):
path = '/' + '/'.join(tokens)
path = "/" + "/".join(tokens)
if path != _template_uri and _file_exists(lookup, path):
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), path)
(autohandler, _template_uri, name), path
)
else:
return path
if len(tokens) == 1:
@ -51,15 +55,16 @@ def autohandler(template, context, name='autohandler'):
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), None)
(autohandler, _template_uri, name), None
)
else:
return None
def _file_exists(lookup, path):
psub = re.sub(r'^/', '',path)
psub = re.sub(r"^/", "", path)
for d in lookup.directories:
if os.path.exists(d + '/' + psub):
if os.path.exists(d + "/" + psub):
return True
else:
return False
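
Concretely, the <%inherit> shown in the docstring above resolves each template to the nearest enclosing autohandler file. A sketch with a hypothetical layout:

from mako.lookup import TemplateLookup

# hypothetical directory layout:
#   templates/autohandler
#   templates/some/autohandler
#   templates/some/page.html   (contains the <%inherit file="${autohandler(template, context)}"/> line)
lookup = TemplateLookup(directories=["templates"])
print(lookup.get_template("/some/page.html").render())   # inherits from templates/some/autohandler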


@ -1,11 +1,12 @@
# ext/babelplugin.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from babel.messages.extract import extract_python
from mako.ext.extract import MessageExtractor
@ -14,9 +15,10 @@ class BabelMakoExtractor(MessageExtractor):
self.keywords = keywords
self.options = options
self.config = {
'comment-tags': u' '.join(comment_tags),
'encoding': options.get('input_encoding',
options.get('encoding', None)),
"comment-tags": u" ".join(comment_tags),
"encoding": options.get(
"input_encoding", options.get("encoding", None)
),
}
super(BabelMakoExtractor, self).__init__()
@ -24,12 +26,19 @@ class BabelMakoExtractor(MessageExtractor):
return self.process_file(fileobj)
def process_python(self, code, code_lineno, translator_strings):
comment_tags = self.config['comment-tags']
for lineno, funcname, messages, python_translator_comments \
in extract_python(code,
self.keywords, comment_tags, self.options):
yield (code_lineno + (lineno - 1), funcname, messages,
translator_strings + python_translator_comments)
comment_tags = self.config["comment-tags"]
for (
lineno,
funcname,
messages,
python_translator_comments,
) in extract_python(code, self.keywords, comment_tags, self.options):
yield (
code_lineno + (lineno - 1),
funcname,
messages,
translator_strings + python_translator_comments,
)
def extract(fileobj, keywords, comment_tags, options):


@ -1,7 +1,12 @@
# ext/beaker_cache.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide a :class:`.CacheImpl` for the Beaker caching system."""
from mako import exceptions
from mako.cache import CacheImpl
try:
@ -15,6 +20,7 @@ _beaker_cache = None
class BeakerCacheImpl(CacheImpl):
"""A :class:`.CacheImpl` provided for the Beaker caching system.
This plugin is used by default, based on the default
@ -26,36 +32,37 @@ class BeakerCacheImpl(CacheImpl):
def __init__(self, cache):
if not has_beaker:
raise exceptions.RuntimeException(
"Can't initialize Beaker plugin; Beaker is not installed.")
"Can't initialize Beaker plugin; Beaker is not installed."
)
global _beaker_cache
if _beaker_cache is None:
if 'manager' in cache.template.cache_args:
_beaker_cache = cache.template.cache_args['manager']
if "manager" in cache.template.cache_args:
_beaker_cache = cache.template.cache_args["manager"]
else:
_beaker_cache = beaker_cache.CacheManager()
super(BeakerCacheImpl, self).__init__(cache)
def _get_cache(self, **kw):
expiretime = kw.pop('timeout', None)
if 'dir' in kw:
kw['data_dir'] = kw.pop('dir')
expiretime = kw.pop("timeout", None)
if "dir" in kw:
kw["data_dir"] = kw.pop("dir")
elif self.cache.template.module_directory:
kw['data_dir'] = self.cache.template.module_directory
kw["data_dir"] = self.cache.template.module_directory
if 'manager' in kw:
kw.pop('manager')
if "manager" in kw:
kw.pop("manager")
if kw.get('type') == 'memcached':
kw['type'] = 'ext:memcached'
if kw.get("type") == "memcached":
kw["type"] = "ext:memcached"
if 'region' in kw:
region = kw.pop('region')
if "region" in kw:
region = kw.pop("region")
cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
else:
cache = _beaker_cache.get_cache(self.cache.id, **kw)
cache_args = {'starttime': self.cache.starttime}
cache_args = {"starttime": self.cache.starttime}
if expiretime:
cache_args['expiretime'] = expiretime
cache_args["expiretime"] = expiretime
return cache, cache_args
def get_or_create(self, key, creation_function, **kw):


@ -1,4 +1,11 @@
# ext/extract.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
from mako import compat
from mako import lexer
from mako import parsetree
@ -7,22 +14,26 @@ from mako import parsetree
class MessageExtractor(object):
def process_file(self, fileobj):
template_node = lexer.Lexer(
fileobj.read(),
input_encoding=self.config['encoding']).parse()
fileobj.read(), input_encoding=self.config["encoding"]
).parse()
for extracted in self.extract_nodes(template_node.get_children()):
yield extracted
def extract_nodes(self, nodes):
translator_comments = []
in_translator_comments = False
input_encoding = self.config["encoding"] or "ascii"
comment_tags = list(
filter(None, re.split(r'\s+', self.config['comment-tags'])))
filter(None, re.split(r"\s+", self.config["comment-tags"]))
)
for node in nodes:
child_nodes = None
if in_translator_comments and \
isinstance(node, parsetree.Text) and \
not node.content.strip():
if (
in_translator_comments
and isinstance(node, parsetree.Text)
and not node.content.strip()
):
# Ignore whitespace within translator comments
continue
@ -30,13 +41,15 @@ class MessageExtractor(object):
value = node.text.strip()
if in_translator_comments:
translator_comments.extend(
self._split_comment(node.lineno, value))
self._split_comment(node.lineno, value)
)
continue
for comment_tag in comment_tags:
if value.startswith(comment_tag):
in_translator_comments = True
translator_comments.extend(
self._split_comment(node.lineno, value))
self._split_comment(node.lineno, value)
)
continue
if isinstance(node, parsetree.DefTag):
@ -66,22 +79,31 @@ class MessageExtractor(object):
else:
continue
# Comments don't apply unless they immediately preceed the message
if translator_comments and \
translator_comments[-1][0] < node.lineno - 1:
# Comments don't apply unless they immediately precede the message
if (
translator_comments
and translator_comments[-1][0] < node.lineno - 1
):
translator_comments = []
translator_strings = [
comment[1] for comment in translator_comments]
comment[1] for comment in translator_comments
]
if isinstance(code, compat.text_type):
code = code.encode('ascii', 'backslashreplace')
code = code.encode(input_encoding, "backslashreplace")
used_translator_comments = False
code = compat.byte_buffer(code)
# We add extra newline to work around a pybabel bug
# (see python-babel/babel#274, parse_encoding dies if the first
# input string of the input is non-ascii)
# Also, because we added it, we have to subtract one from
# node.lineno
code = compat.byte_buffer(compat.b("\n") + code)
for message in self.process_python(
code, node.lineno, translator_strings):
code, node.lineno - 1, translator_strings
):
yield message
used_translator_comments = True
@ -97,5 +119,7 @@ class MessageExtractor(object):
def _split_comment(lineno, comment):
"""Return the multiline comment at lineno split into a list of
comment line numbers and the accompanying comment line"""
return [(lineno + index, line) for index, line in
enumerate(comment.splitlines())]
return [
(lineno + index, line)
for index, line in enumerate(comment.splitlines())
]


@ -1,38 +1,57 @@
# ext/linguaplugin.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import io
from lingua.extractors import Extractor
from lingua.extractors import Message
from lingua.extractors import get_extractor
from mako.ext.extract import MessageExtractor
from lingua.extractors import Message
from mako import compat
from mako.ext.extract import MessageExtractor
class LinguaMakoExtractor(Extractor, MessageExtractor):
'''Mako templates'''
extensions = ['.mako']
default_config = {
'encoding': 'utf-8',
'comment-tags': '',
}
"""Mako templates"""
extensions = [".mako"]
default_config = {"encoding": "utf-8", "comment-tags": ""}
def __call__(self, filename, options, fileobj=None):
self.options = options
self.filename = filename
self.python_extractor = get_extractor('x.py')
self.python_extractor = get_extractor("x.py")
if fileobj is None:
fileobj = open(filename, 'rb')
fileobj = open(filename, "rb")
return self.process_file(fileobj)
def process_python(self, code, code_lineno, translator_strings):
source = code.getvalue().strip()
if source.endswith(compat.b(':')):
source += compat.b(' pass')
if source.endswith(compat.b(":")):
if source in (
compat.b("try:"),
compat.b("else:"),
) or source.startswith(compat.b("except")):
source = compat.b("") # Ignore try/except and else
elif source.startswith(compat.b("elif")):
source = source[2:] # Replace "elif" with "if"
source += compat.b("pass")
code = io.BytesIO(source)
for msg in self.python_extractor(
self.filename, self.options, code, code_lineno):
self.filename, self.options, code, code_lineno - 1
):
if translator_strings:
msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
msg = Message(
msg.msgctxt,
msg.msgid,
msg.msgid_plural,
msg.flags,
compat.u(' ').join(
translator_strings + [msg.comment]),
msg.tcomment, msg.location)
compat.u(" ").join(translator_strings + [msg.comment]),
msg.tcomment,
msg.location,
)
yield msg
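# Illustrative sketch (not from the Mako source) restating the normalisation
# rule process_python applies above, so it can be checked on its own: bare
# try/else/except headers are dropped, "elif" becomes "if", and a trailing
# "pass" completes the statement for the Python extractor.
def _normalise(source):
    if source.endswith(b":"):
        if source in (b"try:", b"else:") or source.startswith(b"except"):
            source = b""          # nothing translatable in the keyword itself
        elif source.startswith(b"elif"):
            source = source[2:]   # "elif cond:" -> "if cond:"
        source += b"pass"         # make the fragment parseable
    return source

assert _normalise(b"elif _('x'):") == b"if _('x'):pass"
assert _normalise(b"try:") == b"pass"
assert _normalise(b"print(_('x'))") == b"print(_('x'))"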

View file

@ -1,5 +1,5 @@
# ext/preprocessors.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -9,6 +9,7 @@ argument on Template, TemplateLookup"""
import re
def convert_comments(text):
"""preprocess old style comments.
@ -16,5 +17,4 @@ def convert_comments(text):
from mako.ext.preprocessors import convert_comments
t = Template(..., preprocessor=convert_comments)"""
return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
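# Quick standalone check (not from the Mako source) of the rewrite performed by
# convert_comments: an old-style "# comment" line becomes a "##" Mako comment.
import re

def _convert_comments(text):  # restated so the snippet runs on its own
    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)

assert _convert_comments("<p>\n# old comment\n") == "<p>\n##old comment\n"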

View file

@ -1,44 +1,73 @@
# ext/pygmentplugin.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from pygments.lexers.web import \
HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
from pygments.lexers.agile import PythonLexer, Python3Lexer
from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
include, using
from pygments.token import \
Text, Comment, Operator, Keyword, Name, String, Other
from pygments.formatters.html import HtmlFormatter
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from pygments.lexer import bygroups
from pygments.lexer import DelegatingLexer
from pygments.lexer import include
from pygments.lexer import RegexLexer
from pygments.lexer import using
from pygments.lexers.agile import Python3Lexer
from pygments.lexers.agile import PythonLexer
from pygments.lexers.web import CssLexer
from pygments.lexers.web import HtmlLexer
from pygments.lexers.web import JavascriptLexer
from pygments.lexers.web import XmlLexer
from pygments.token import Comment
from pygments.token import Keyword
from pygments.token import Name
from pygments.token import Operator
from pygments.token import Other
from pygments.token import String
from pygments.token import Text
from mako import compat
class MakoLexer(RegexLexer):
name = 'Mako'
aliases = ['mako']
filenames = ['*.mao']
name = "Mako"
aliases = ["mako"]
filenames = ["*.mao"]
tokens = {
'root': [
(r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
bygroups(Text, Comment.Preproc, Keyword, Other)),
(r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, Other)),
(r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
(r'(<%)([\w\.\:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(</%)([\w\.\:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
(r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
(r'(<%(?:!?))(.*?)(%>)(?s)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'''(?sx)
"root": [
(
r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
bygroups(Text, Comment.Preproc, Keyword, Other),
),
(
r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
),
(
r"(\s*)(##[^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, Other),
),
(r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
(
r"(<%)([\w\.\:]+)",
bygroups(Comment.Preproc, Name.Builtin),
"tag",
),
(
r"(</%)([\w\.\:]+)(>)",
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
),
(r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
(
r"(?s)(<%(?:!?))(.*?)(%>)",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"(\$\{)(.*?)(\})",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
@ -51,72 +80,78 @@ class MakoLexer(RegexLexer):
(\\\n) | # an escaped newline
\Z # end of string
)
''', bygroups(Other, Operator)),
(r'\s+', Text),
""",
bygroups(Other, Operator),
),
(r"\s+", Text),
],
'ondeftags': [
(r'<%', Comment.Preproc),
(r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
include('tag'),
"ondeftags": [
(r"<%", Comment.Preproc),
(r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
include("tag"),
],
'tag': [
(r'((?:\w+)\s*=)\s*(".*?")',
bygroups(Name.Attribute, String)),
(r'/?\s*>', Comment.Preproc, '#pop'),
(r'\s+', Text),
"tag": [
(r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
(r"/?\s*>", Comment.Preproc, "#pop"),
(r"\s+", Text),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
"attr": [
('".*?"', String, "#pop"),
("'.*?'", String, "#pop"),
(r"[^\s>]+", String, "#pop"),
],
}
class MakoHtmlLexer(DelegatingLexer):
name = 'HTML+Mako'
aliases = ['html+mako']
name = "HTML+Mako"
aliases = ["html+mako"]
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
**options)
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
name = 'XML+Mako'
aliases = ['xml+mako']
name = "XML+Mako"
aliases = ["xml+mako"]
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
**options)
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
name = 'JavaScript+Mako'
aliases = ['js+mako', 'javascript+mako']
name = "JavaScript+Mako"
aliases = ["js+mako", "javascript+mako"]
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
MakoLexer, **options)
super(MakoJavascriptLexer, self).__init__(
JavascriptLexer, MakoLexer, **options
)
class MakoCssLexer(DelegatingLexer):
name = 'CSS+Mako'
aliases = ['css+mako']
name = "CSS+Mako"
aliases = ["css+mako"]
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
**options)
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
linenos=True)
def syntax_highlight(filename='', language=None):
pygments_html_formatter = HtmlFormatter(
cssclass="syntax-highlighted", linenos=True
)
def syntax_highlight(filename="", language=None):
mako_lexer = MakoLexer()
if compat.py3k:
python_lexer = Python3Lexer()
else:
python_lexer = PythonLexer()
if filename.startswith('memory:') or language == 'mako':
return lambda string: highlight(string, mako_lexer,
pygments_html_formatter)
return lambda string: highlight(string, python_lexer,
pygments_html_formatter)
if filename.startswith("memory:") or language == "mako":
return lambda string: highlight(
string, mako_lexer, pygments_html_formatter
)
return lambda string: highlight(
string, python_lexer, pygments_html_formatter
)
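# A hedged usage sketch: rendering a made-up template snippet to HTML with the
# lexer defined above. Requires pygments; the template text is hypothetical.
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from mako.ext.pygmentplugin import MakoLexer

source = "% if user:\n  Hello ${user.name | h}!\n% endif\n"
print(highlight(source, MakoLexer(), HtmlFormatter(linenos=True))[:80])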

View file

@ -1,18 +1,19 @@
# ext/turbogears.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import inspect
from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template
class TGPlugin(object):
"""TurboGears compatible Template Plugin."""
def __init__(self, extra_vars_func=None, options=None, extension='mak'):
def __init__(self, extra_vars_func=None, options=None, extension="mak"):
self.extra_vars_func = extra_vars_func
self.extension = extension
if not options:
@ -21,16 +22,16 @@ class TGPlugin(object):
# Pull the options out and initialize the lookup
lookup_options = {}
for k, v in options.items():
if k.startswith('mako.'):
if k.startswith("mako."):
lookup_options[k[5:]] = v
elif k in ['directories', 'filesystem_checks', 'module_directory']:
elif k in ["directories", "filesystem_checks", "module_directory"]:
lookup_options[k] = v
self.lookup = TemplateLookup(**lookup_options)
self.tmpl_options = {}
# transfer lookup args to template args, based on those available
# in getargspec
for kw in inspect.getargspec(Template.__init__)[0]:
for kw in compat.inspect_getargspec(Template.__init__)[0]:
if kw in lookup_options:
self.tmpl_options[kw] = lookup_options[kw]
@ -39,14 +40,17 @@ class TGPlugin(object):
if template_string is not None:
return Template(template_string, **self.tmpl_options)
# Translate TG dot notation to normal / template path
if '/' not in templatename:
templatename = '/' + templatename.replace('.', '/') + '.' +\
self.extension
if "/" not in templatename:
templatename = (
"/" + templatename.replace(".", "/") + "." + self.extension
)
# Lookup template
return self.lookup.get_template(templatename)
def render(self, info, format="html", fragment=False, template=None):
def render(
self, info, format="html", fragment=False, template=None # noqa
):
if isinstance(template, compat.string_types):
template = self.load_template(template)
@ -55,4 +59,3 @@ class TGPlugin(object):
info.update(self.extra_vars_func())
return template.render(**info)
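# A hedged usage sketch; the directory path and template name are hypothetical,
# and the lookup calls are left commented out because they need the template
# file to exist on disk.
from mako.ext.turbogears import TGPlugin

plugin = TGPlugin(options={"directories": ["/tmp/templates"]}, extension="mak")
# Dot notation is translated to a path: "site.index" -> "/site/index.mak"
# template = plugin.load_template("site.index")
# print(plugin.render({"title": "Home"}, template=template))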

View file

@ -1,29 +1,31 @@
# mako/filters.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
import codecs
from mako.compat import quote_plus, unquote_plus, codepoint2name, \
name2codepoint
import re
from mako import compat
from mako.compat import codepoint2name
from mako.compat import name2codepoint
from mako.compat import quote_plus
from mako.compat import unquote_plus
xml_escapes = {
'&': '&amp;',
'>': '&gt;',
'<': '&lt;',
'"': '&#34;', # also &quot; in html-only
"'": '&#39;' # also &apos; in html-only
"&": "&amp;",
">": "&gt;",
"<": "&lt;",
'"': "&#34;", # also &quot; in html-only
"'": "&#39;", # also &apos; in html-only
}
# XXX: &quot; is valid in HTML and XML
# &apos; is not valid HTML, but is valid XML
def legacy_html_escape(s):
"""legacy HTML escape for non-unicode mode."""
s = s.replace("&", "&amp;")
@ -36,28 +38,34 @@ def legacy_html_escape(s):
try:
import markupsafe
html_escape = markupsafe.escape
except ImportError:
html_escape = legacy_html_escape
def xml_escape(string):
return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
def url_escape(string):
# convert into a list of octets
string = string.encode("utf8")
return quote_plus(string)
def legacy_url_escape(string):
# convert into a list of octets
return quote_plus(string)
def url_unescape(string):
text = unquote_plus(string)
if not is_ascii_str(text):
text = text.decode("utf8")
return text
def trim(string):
return string.strip()
@ -71,21 +79,31 @@ class Decode(object):
return decode(str(x))
else:
return compat.text_type(x, encoding=key)
return decode
decode = Decode()
_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')
_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
def is_ascii_str(text):
return isinstance(text, str) and _ASCII_re.match(text)
################################################################
class XMLEntityEscaper(object):
def __init__(self, codepoint2name, name2codepoint):
self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
for c, n in codepoint2name.items()])
self.codepoint2entity = dict(
[
(c, compat.text_type("&%s;" % n))
for c, n in codepoint2name.items()
]
)
self.name2codepoint = name2codepoint
def escape_entities(self, text):
@ -100,8 +118,7 @@ class XMLEntityEscaper(object):
try:
return self.codepoint2entity[codepoint]
except (KeyError, IndexError):
return '&#x%X;' % codepoint
return "&#x%X;" % codepoint
__escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
@ -114,19 +131,22 @@ class XMLEntityEscaper(object):
The return value is guaranteed to be ASCII.
"""
return self.__escapable.sub(self.__escape, compat.text_type(text)
).encode('ascii')
return self.__escapable.sub(
self.__escape, compat.text_type(text)
).encode("ascii")
# XXX: This regexp will not match all valid XML entity names__.
# (It punts on details involving CombiningChars and Extenders.)
#
# .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
__characterrefs = re.compile(r'''& (?:
__characterrefs = re.compile(
r"""& (?:
\#(\d+)
| \#x([\da-f]+)
| ( (?!\d) [:\w] [-.:\w]+ )
) ;''',
re.X | re.UNICODE)
) ;""",
re.X | re.UNICODE,
)
def __unescape(self, m):
dval, hval, name = m.groups()
@ -135,7 +155,7 @@ class XMLEntityEscaper(object):
elif hval:
codepoint = int(hval, 16)
else:
codepoint = self.name2codepoint.get(name, 0xfffd)
codepoint = self.name2codepoint.get(name, 0xFFFD)
# U+FFFD = "REPLACEMENT CHARACTER"
if codepoint < 128:
return chr(codepoint)
@ -159,43 +179,41 @@ html_entities_unescape = _html_entities_escaper.unescape
def htmlentityreplace_errors(ex):
"""An encoding error handler.
This python `codecs`_ error handler replaces unencodable
This python codecs error handler replaces unencodable
characters with HTML entities, or, if no HTML entity exists for
the character, XML character references.
the character, XML character references::
>>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
'The cost was &euro;12.'
"""
if isinstance(ex, UnicodeEncodeError):
# Handle encoding errors
bad_text = ex.object[ex.start:ex.end]
bad_text = ex.object[ex.start : ex.end]
text = _html_entities_escaper.escape(bad_text)
return (compat.text_type(text), ex.end)
raise ex
codecs.register_error('htmlentityreplace', htmlentityreplace_errors)
codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
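# A hedged check of the module-level helpers built from XMLEntityEscaper above;
# only characters that have a named HTML entity are touched, and unescape()
# reverses the substitution.
from mako.filters import html_entities_escape, html_entities_unescape

escaped = html_entities_escape('price > 10 \u20ac & "net"')
print(escaped)  # expected: price &gt; 10 &euro; &amp; &quot;net&quot;
assert html_entities_unescape(escaped) == 'price > 10 \u20ac & "net"'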
# TODO: options to make this dynamic per-compilation will be added in a later
# release
DEFAULT_ESCAPES = {
'x': 'filters.xml_escape',
'h': 'filters.html_escape',
'u': 'filters.url_escape',
'trim': 'filters.trim',
'entity': 'filters.html_entities_escape',
'unicode': 'unicode',
'decode': 'decode',
'str': 'str',
'n': 'n'
"x": "filters.xml_escape",
"h": "filters.html_escape",
"u": "filters.url_escape",
"trim": "filters.trim",
"entity": "filters.html_entities_escape",
"unicode": "unicode",
"decode": "decode",
"str": "str",
"n": "n",
}
if compat.py3k:
DEFAULT_ESCAPES.update({
'unicode': 'str'
})
DEFAULT_ESCAPES.update({"unicode": "str"})
NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape'
NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape'
NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape"
NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape"
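# A hedged rendering example of the "h" and "u" aliases listed above; the
# template strings and values are made up.
from mako.template import Template

print(Template("${name | h}").render(name='<b>&"quoted"</b>'))
# expected: &lt;b&gt;&amp;&#34;quoted&#34;&lt;/b&gt;
print(Template("${q | u}").render(q="a & b"))
# expected: a+%26+b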

View file

@ -1,22 +1,31 @@
# mako/lexer.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides the Lexer class for parsing template strings into parse trees."""
import re
import codecs
from mako import parsetree, exceptions, compat
import re
from mako import compat
from mako import exceptions
from mako import parsetree
from mako.pygen import adjust_whitespace
_regexp_cache = {}
class Lexer(object):
def __init__(self, text, filename=None,
def __init__(
self,
text,
filename=None,
disable_unicode=False,
input_encoding=None, preprocessor=None):
input_encoding=None,
preprocessor=None,
):
self.text = text
self.filename = filename
self.template = parsetree.TemplateNode(self.filename)
@ -32,22 +41,24 @@ class Lexer(object):
if compat.py3k and disable_unicode:
raise exceptions.UnsupportedError(
"Mako for Python 3 does not "
"support disabling Unicode")
"Mako for Python 3 does not " "support disabling Unicode"
)
if preprocessor is None:
self.preprocessor = []
elif not hasattr(preprocessor, '__iter__'):
elif not hasattr(preprocessor, "__iter__"):
self.preprocessor = [preprocessor]
else:
self.preprocessor = preprocessor
@property
def exception_kwargs(self):
return {'source': self.text,
'lineno': self.matched_lineno,
'pos': self.matched_charpos,
'filename': self.filename}
return {
"source": self.text,
"lineno": self.matched_lineno,
"pos": self.matched_charpos,
"filename": self.filename,
}
def match(self, regexp, flags=None):
"""compile the given regexp, cache the reg, and call match_reg()."""
@ -81,54 +92,63 @@ class Lexer(object):
else:
self.match_position = end
self.matched_lineno = self.lineno
lines = re.findall(r"\n", self.text[mp:self.match_position])
lines = re.findall(r"\n", self.text[mp : self.match_position])
cp = mp - 1
while (cp >= 0 and cp < self.textlength and self.text[cp] != '\n'):
while cp >= 0 and cp < self.textlength and self.text[cp] != "\n":
cp -= 1
self.matched_charpos = mp - cp
self.lineno += len(lines)
#print "MATCHED:", match.group(0), "LINE START:",
# print "MATCHED:", match.group(0), "LINE START:",
# self.matched_lineno, "LINE END:", self.lineno
#print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
# print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
# (match and "TRUE" or "FALSE")
return match
def parse_until_text(self, *text):
def parse_until_text(self, watch_nesting, *text):
startpos = self.match_position
text_re = r'|'.join(text)
text_re = r"|".join(text)
brace_level = 0
paren_level = 0
bracket_level = 0
while True:
match = self.match(r'#.*\n')
match = self.match(r"#.*\n")
if match:
continue
match = self.match(r'(\"\"\"|\'\'\'|\"|\')((?<!\\)\\\1|.)*?\1',
re.S)
match = self.match(
r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S
)
if match:
continue
match = self.match(r'(%s)' % text_re)
if match:
if match.group(1) == '}' and brace_level > 0:
brace_level -= 1
continue
return \
self.text[startpos:
self.match_position - len(match.group(1))],\
match.group(1)
match = self.match(r"(%s)" % text_re)
if match and not (
watch_nesting
and (brace_level > 0 or paren_level > 0 or bracket_level > 0)
):
return (
self.text[
startpos : self.match_position - len(match.group(1))
],
match.group(1),
)
elif not match:
match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S)
if match:
brace_level += match.group(1).count('{')
brace_level -= match.group(1).count('}')
brace_level += match.group(1).count("{")
brace_level -= match.group(1).count("}")
paren_level += match.group(1).count("(")
paren_level -= match.group(1).count(")")
bracket_level += match.group(1).count("[")
bracket_level -= match.group(1).count("]")
continue
raise exceptions.SyntaxException(
"Expected: %s" %
','.join(text),
**self.exception_kwargs)
"Expected: %s" % ",".join(text), **self.exception_kwargs
)
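# A hedged illustration of the new nesting tracking: the "|" inside the
# parentheses is a bitwise OR, not the expression/filter separator, and is now
# skipped because paren_level > 0 while watch_nesting is on.
from mako.template import Template

print(Template("${ (1 | 2) }").render())  # expected: 3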
def append_node(self, nodecls, *args, **kwargs):
kwargs.setdefault('source', self.text)
kwargs.setdefault('lineno', self.matched_lineno)
kwargs.setdefault('pos', self.matched_charpos)
kwargs['filename'] = self.filename
kwargs.setdefault("source", self.text)
kwargs.setdefault("lineno", self.matched_lineno)
kwargs.setdefault("pos", self.matched_charpos)
kwargs["filename"] = self.filename
node = nodecls(*args, **kwargs)
if len(self.tag):
self.tag[-1].nodes.append(node)
@ -141,8 +161,10 @@ class Lexer(object):
if self.control_line:
control_frame = self.control_line[-1]
control_frame.nodes.append(node)
if not (isinstance(node, parsetree.ControlLine) and
control_frame.is_ternary(node.keyword)):
if not (
isinstance(node, parsetree.ControlLine)
and control_frame.is_ternary(node.keyword)
):
if self.ternary_stack and self.ternary_stack[-1]:
self.ternary_stack[-1][-1].nodes.append(node)
if isinstance(node, parsetree.Tag):
@ -156,17 +178,20 @@ class Lexer(object):
elif node.is_primary:
self.control_line.append(node)
self.ternary_stack.append([])
elif self.control_line and \
self.control_line[-1].is_ternary(node.keyword):
elif self.control_line and self.control_line[-1].is_ternary(
node.keyword
):
self.ternary_stack[-1].append(node)
elif self.control_line and \
not self.control_line[-1].is_ternary(node.keyword):
elif self.control_line and not self.control_line[-1].is_ternary(
node.keyword
):
raise exceptions.SyntaxException(
"Keyword '%s' not a legal ternary for keyword '%s'" %
(node.keyword, self.control_line[-1].keyword),
**self.exception_kwargs)
"Keyword '%s' not a legal ternary for keyword '%s'"
% (node.keyword, self.control_line[-1].keyword),
**self.exception_kwargs
)
_coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
_coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n")
def decode_raw_stream(self, text, decode_raw, known_encoding, filename):
"""given string/unicode or bytes/string, determine encoding
@ -176,43 +201,48 @@ class Lexer(object):
"""
if isinstance(text, compat.text_type):
m = self._coding_re.match(text)
encoding = m and m.group(1) or known_encoding or 'ascii'
encoding = m and m.group(1) or known_encoding or "ascii"
return encoding, text
if text.startswith(codecs.BOM_UTF8):
text = text[len(codecs.BOM_UTF8):]
parsed_encoding = 'utf-8'
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
if m is not None and m.group(1) != 'utf-8':
text = text[len(codecs.BOM_UTF8) :]
parsed_encoding = "utf-8"
m = self._coding_re.match(text.decode("utf-8", "ignore"))
if m is not None and m.group(1) != "utf-8":
raise exceptions.CompileException(
"Found utf-8 BOM in file, with conflicting "
"magic encoding comment of '%s'" % m.group(1),
text.decode('utf-8', 'ignore'),
0, 0, filename)
text.decode("utf-8", "ignore"),
0,
0,
filename,
)
else:
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
m = self._coding_re.match(text.decode("utf-8", "ignore"))
if m:
parsed_encoding = m.group(1)
else:
parsed_encoding = known_encoding or 'ascii'
parsed_encoding = known_encoding or "ascii"
if decode_raw:
try:
text = text.decode(parsed_encoding)
except UnicodeDecodeError:
raise exceptions.CompileException(
"Unicode decode operation of encoding '%s' failed" %
parsed_encoding,
text.decode('utf-8', 'ignore'),
0, 0, filename)
"Unicode decode operation of encoding '%s' failed"
% parsed_encoding,
text.decode("utf-8", "ignore"),
0,
0,
filename,
)
return parsed_encoding, text
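# A hedged illustration of the encoding detection above: a magic coding comment
# in the raw bytes overrides the input_encoding hint; the template text is
# made up.
from mako.lexer import Lexer

raw = "## -*- coding: utf-8 -*-\ncaf\u00e9 ${x}\n".encode("utf-8")
lexer = Lexer(raw, input_encoding="ascii")
lexer.parse()
print(lexer.encoding)  # expected: utf-8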
def parse(self):
self.encoding, self.text = self.decode_raw_stream(self.text,
not self.disable_unicode,
self.encoding,
self.filename,)
self.encoding, self.text = self.decode_raw_stream(
self.text, not self.disable_unicode, self.encoding, self.filename
)
for preproc in self.preprocessor:
self.text = preproc(self.text)
@ -223,7 +253,7 @@ class Lexer(object):
self.textlength = len(self.text)
while (True):
while True:
if self.match_position > self.textlength:
break
@ -249,20 +279,24 @@ class Lexer(object):
raise exceptions.CompileException("assertion failed")
if len(self.tag):
raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
self.tag[-1].keyword,
**self.exception_kwargs)
raise exceptions.SyntaxException(
"Unclosed tag: <%%%s>" % self.tag[-1].keyword,
**self.exception_kwargs
)
if len(self.control_line):
raise exceptions.SyntaxException(
"Unterminated control keyword: '%s'" %
self.control_line[-1].keyword,
"Unterminated control keyword: '%s'"
% self.control_line[-1].keyword,
self.text,
self.control_line[-1].lineno,
self.control_line[-1].pos, self.filename)
self.control_line[-1].pos,
self.filename,
)
return self.template
def match_tag_start(self):
match = self.match(r'''
match = self.match(
r"""
\<% # opening tag
([\w\.\:]+) # keyword
@ -274,9 +308,9 @@ class Lexer(object):
(/)?> # closing
''',
re.I | re.S | re.X)
""",
re.I | re.S | re.X,
)
if match:
keyword, attr, isend = match.groups()
@ -284,22 +318,23 @@ class Lexer(object):
attributes = {}
if attr:
for att in re.findall(
r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
):
key, val1, val2 = att
text = val1 or val2
text = text.replace('\r\n', '\n')
text = text.replace("\r\n", "\n")
attributes[key] = text
self.append_node(parsetree.Tag, keyword, attributes)
if isend:
self.tag.pop()
else:
if keyword == 'text':
match = self.match(r'(.*?)(?=\</%text>)', re.S)
if keyword == "text":
match = self.match(r"(.*?)(?=\</%text>)", re.S)
if not match:
raise exceptions.SyntaxException(
"Unclosed tag: <%%%s>" %
self.tag[-1].keyword,
**self.exception_kwargs)
"Unclosed tag: <%%%s>" % self.tag[-1].keyword,
**self.exception_kwargs
)
self.append_node(parsetree.Text, match.group(1))
return self.match_tag_end()
return True
@ -307,25 +342,27 @@ class Lexer(object):
return False
def match_tag_end(self):
match = self.match(r'\</%[\t ]*(.+?)[\t ]*>')
match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
if match:
if not len(self.tag):
raise exceptions.SyntaxException(
"Closing tag without opening tag: </%%%s>" %
match.group(1),
**self.exception_kwargs)
"Closing tag without opening tag: </%%%s>"
% match.group(1),
**self.exception_kwargs
)
elif self.tag[-1].keyword != match.group(1):
raise exceptions.SyntaxException(
"Closing tag </%%%s> does not match tag: <%%%s>" %
(match.group(1), self.tag[-1].keyword),
**self.exception_kwargs)
"Closing tag </%%%s> does not match tag: <%%%s>"
% (match.group(1), self.tag[-1].keyword),
**self.exception_kwargs
)
self.tag.pop()
return True
else:
return False
def match_end(self):
match = self.match(r'\Z', re.S)
match = self.match(r"\Z", re.S)
if match:
string = match.group()
if string:
@ -336,7 +373,8 @@ class Lexer(object):
return False
def match_text(self):
match = self.match(r"""
match = self.match(
r"""
(.*?) # anything, followed by:
(
(?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
@ -351,7 +389,9 @@ class Lexer(object):
(\\\r?\n) # an escaped newline - throw away
|
\Z # end of string
)""", re.X | re.S)
)""",
re.X | re.S,
)
if match:
text = match.group(1)
@ -365,14 +405,17 @@ class Lexer(object):
match = self.match(r"<%(!)?")
if match:
line, pos = self.matched_lineno, self.matched_charpos
text, end = self.parse_until_text(r'%>')
text, end = self.parse_until_text(False, r"%>")
# the trailing newline helps
# compiler.parse() not complain about indentation
text = adjust_whitespace(text) + "\n"
self.append_node(
parsetree.Code,
text,
match.group(1) == '!', lineno=line, pos=pos)
match.group(1) == "!",
lineno=line,
pos=pos,
)
return True
else:
return False
@ -381,16 +424,19 @@ class Lexer(object):
match = self.match(r"\${")
if match:
line, pos = self.matched_lineno, self.matched_charpos
text, end = self.parse_until_text(r'\|', r'}')
if end == '|':
escapes, end = self.parse_until_text(r'}')
text, end = self.parse_until_text(True, r"\|", r"}")
if end == "|":
escapes, end = self.parse_until_text(True, r"}")
else:
escapes = ""
text = text.replace('\r\n', '\n')
text = text.replace("\r\n", "\n")
self.append_node(
parsetree.Expression,
text, escapes.strip(),
lineno=line, pos=pos)
text,
escapes.strip(),
lineno=line,
pos=pos,
)
return True
else:
return False
@ -398,31 +444,35 @@ class Lexer(object):
def match_control_line(self):
match = self.match(
r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
r"(?:\r?\n|\Z)", re.M)
r"(?:\r?\n|\Z)",
re.M,
)
if match:
operator = match.group(1)
text = match.group(2)
if operator == '%':
m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
if operator == "%":
m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
if not m2:
raise exceptions.SyntaxException(
"Invalid control line: '%s'" %
text,
**self.exception_kwargs)
"Invalid control line: '%s'" % text,
**self.exception_kwargs
)
isend, keyword = m2.group(1, 2)
isend = (isend is not None)
isend = isend is not None
if isend:
if not len(self.control_line):
raise exceptions.SyntaxException(
"No starting keyword '%s' for '%s'" %
(keyword, text),
**self.exception_kwargs)
"No starting keyword '%s' for '%s'"
% (keyword, text),
**self.exception_kwargs
)
elif self.control_line[-1].keyword != keyword:
raise exceptions.SyntaxException(
"Keyword '%s' doesn't match keyword '%s'" %
(text, self.control_line[-1].keyword),
**self.exception_kwargs)
"Keyword '%s' doesn't match keyword '%s'"
% (text, self.control_line[-1].keyword),
**self.exception_kwargs
)
self.append_node(parsetree.ControlLine, keyword, isend, text)
else:
self.append_node(parsetree.Comment, text)
@ -438,4 +488,3 @@ class Lexer(object):
return True
else:
return False

View file

@ -1,11 +1,16 @@
# mako/lookup.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os, stat, posixpath, re
from mako import exceptions, util
import os
import posixpath
import re
import stat
from mako import exceptions
from mako import util
from mako.template import Template
try:
@ -13,7 +18,9 @@ try:
except:
import dummy_threading as threading
class TemplateCollection(object):
"""Represent a collection of :class:`.Template` objects,
identifiable via URI.
@ -79,7 +86,9 @@ class TemplateCollection(object):
"""
return uri
class TemplateLookup(TemplateCollection):
"""Represent a collection of templates that locates template source files
from the local filesystem.
@ -144,7 +153,8 @@ class TemplateLookup(TemplateCollection):
"""
def __init__(self,
def __init__(
self,
directories=None,
module_directory=None,
filesystem_checks=True,
@ -154,15 +164,13 @@ class TemplateLookup(TemplateCollection):
disable_unicode=False,
bytestring_passthrough=False,
output_encoding=None,
encoding_errors='strict',
encoding_errors="strict",
cache_args=None,
cache_impl='beaker',
cache_impl="beaker",
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
modulename_callable=None,
module_writer=None,
default_filters=None,
@ -173,10 +181,12 @@ class TemplateLookup(TemplateCollection):
enable_loop=True,
input_encoding=None,
preprocessor=None,
lexer_cls=None):
lexer_cls=None,
include_error_handler=None,
):
self.directories = [posixpath.normpath(d) for d in
util.to_list(directories, ())
self.directories = [
posixpath.normpath(d) for d in util.to_list(directories, ())
]
self.module_directory = module_directory
self.modulename_callable = modulename_callable
@ -187,33 +197,34 @@ class TemplateLookup(TemplateCollection):
cache_args = {}
# transfer deprecated cache_* args
if cache_dir:
cache_args.setdefault('dir', cache_dir)
cache_args.setdefault("dir", cache_dir)
if cache_url:
cache_args.setdefault('url', cache_url)
cache_args.setdefault("url", cache_url)
if cache_type:
cache_args.setdefault('type', cache_type)
cache_args.setdefault("type", cache_type)
self.template_args = {
'format_exceptions':format_exceptions,
'error_handler':error_handler,
'disable_unicode':disable_unicode,
'bytestring_passthrough':bytestring_passthrough,
'output_encoding':output_encoding,
'cache_impl':cache_impl,
'encoding_errors':encoding_errors,
'input_encoding':input_encoding,
'module_directory':module_directory,
'module_writer':module_writer,
'cache_args':cache_args,
'cache_enabled':cache_enabled,
'default_filters':default_filters,
'buffer_filters':buffer_filters,
'strict_undefined':strict_undefined,
'imports':imports,
'future_imports':future_imports,
'enable_loop':enable_loop,
'preprocessor':preprocessor,
'lexer_cls':lexer_cls
"format_exceptions": format_exceptions,
"error_handler": error_handler,
"include_error_handler": include_error_handler,
"disable_unicode": disable_unicode,
"bytestring_passthrough": bytestring_passthrough,
"output_encoding": output_encoding,
"cache_impl": cache_impl,
"encoding_errors": encoding_errors,
"input_encoding": input_encoding,
"module_directory": module_directory,
"module_writer": module_writer,
"cache_args": cache_args,
"cache_enabled": cache_enabled,
"default_filters": default_filters,
"buffer_filters": buffer_filters,
"strict_undefined": strict_undefined,
"imports": imports,
"future_imports": future_imports,
"enable_loop": enable_loop,
"preprocessor": preprocessor,
"lexer_cls": lexer_cls,
}
if collection_size == -1:
@ -228,7 +239,8 @@ class TemplateLookup(TemplateCollection):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
.. note:: The ``relativeto`` argument is not supported here at the moment.
.. note:: The ``relativeto`` argument is not supported here at
the moment.
"""
@ -238,14 +250,18 @@ class TemplateLookup(TemplateCollection):
else:
return self._collection[uri]
except KeyError:
u = re.sub(r'^\/+', '', uri)
for dir in self.directories:
srcfile = posixpath.normpath(posixpath.join(dir, u))
u = re.sub(r"^\/+", "", uri)
for dir_ in self.directories:
# make sure the path separators are posix - os.altsep is empty
# on POSIX and cannot be used.
dir_ = dir_.replace(os.path.sep, posixpath.sep)
srcfile = posixpath.normpath(posixpath.join(dir_, u))
if os.path.isfile(srcfile):
return self._load(srcfile, uri)
else:
raise exceptions.TopLevelLookupException(
"Cant locate template for uri %r" % uri)
"Cant locate template for uri %r" % uri
)
def adjust_uri(self, uri, relativeto):
"""Adjust the given ``uri`` based on the given relative URI."""
@ -254,17 +270,17 @@ class TemplateLookup(TemplateCollection):
if key in self._uri_cache:
return self._uri_cache[key]
if uri[0] != '/':
if uri[0] != "/":
if relativeto is not None:
v = self._uri_cache[key] = posixpath.join(
posixpath.dirname(relativeto), uri)
posixpath.dirname(relativeto), uri
)
else:
v = self._uri_cache[key] = '/' + uri
v = self._uri_cache[key] = "/" + uri
else:
v = self._uri_cache[key] = uri
return v
def filename_to_uri(self, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
@ -283,9 +299,9 @@ class TemplateLookup(TemplateCollection):
"""
filename = posixpath.normpath(filename)
for dir in self.directories:
if filename[0:len(dir)] == dir:
return filename[len(dir):]
for dir_ in self.directories:
if filename[0 : len(dir_)] == dir_:
return filename[len(dir_) :]
else:
return None
@ -308,7 +324,8 @@ class TemplateLookup(TemplateCollection):
filename=posixpath.normpath(filename),
lookup=self,
module_filename=module_filename,
**self.template_args)
**self.template_args
)
return template
except:
# if compilation fails etc, ensure
@ -325,8 +342,7 @@ class TemplateLookup(TemplateCollection):
try:
template_stat = os.stat(template.filename)
if template.module._modified_time < \
template_stat[stat.ST_MTIME]:
if template.module._modified_time < template_stat[stat.ST_MTIME]:
self._collection.pop(uri, None)
return self._load(template.filename, uri)
else:
@ -334,8 +350,8 @@ class TemplateLookup(TemplateCollection):
except OSError:
self._collection.pop(uri, None)
raise exceptions.TemplateLookupException(
"Cant locate template for uri %r" % uri)
"Cant locate template for uri %r" % uri
)
def put_string(self, uri, text):
"""Place a new :class:`.Template` object into this
@ -344,10 +360,8 @@ class TemplateLookup(TemplateCollection):
"""
self._collection[uri] = Template(
text,
lookup=self,
uri=uri,
**self.template_args)
text, lookup=self, uri=uri, **self.template_args
)
def put_template(self, uri, template):
"""Place a new :class:`.Template` object into this
@ -356,4 +370,3 @@ class TemplateLookup(TemplateCollection):
"""
self._collection[uri] = template
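# A hedged usage sketch; the directory and URI are hypothetical, and
# put_string() lets the example run without any template files on disk.
from mako.lookup import TemplateLookup

lookup = TemplateLookup(directories=["/tmp/templates"], input_encoding="utf-8")
lookup.put_string("/hello.mako", "Hello, ${name}!")
print(lookup.get_template("/hello.mako").render(name="world"))  # Hello, world!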

View file

@ -1,15 +1,22 @@
# mako/parsetree.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""defines the parse tree components for Mako templates."""
from mako import exceptions, ast, util, filters, compat
import re
from mako import ast
from mako import compat
from mako import exceptions
from mako import filters
from mako import util
class Node(object):
"""base class for a Node in the parse tree."""
def __init__(self, source, lineno, pos, filename):
@ -20,8 +27,12 @@ class Node(object):
@property
def exception_kwargs(self):
return {'source': self.source, 'lineno': self.lineno,
'pos': self.pos, 'filename': self.filename}
return {
"source": self.source,
"lineno": self.lineno,
"pos": self.pos,
"filename": self.filename,
}
def get_children(self):
return []
@ -34,11 +45,13 @@ class Node(object):
method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
method(self)
class TemplateNode(Node):
"""a 'container' node that stores the overall collection of nodes."""
def __init__(self, filename):
super(TemplateNode, self).__init__('', 0, 0, filename)
super(TemplateNode, self).__init__("", 0, 0, filename)
self.nodes = []
self.page_attributes = {}
@ -48,9 +61,12 @@ class TemplateNode(Node):
def __repr__(self):
return "TemplateNode(%s, %r)" % (
util.sorted_dict_repr(self.page_attributes),
self.nodes)
self.nodes,
)
class ControlLine(Node):
"""defines a control line, a line-oriented python line or end tag.
e.g.::
@ -68,7 +84,7 @@ class ControlLine(Node):
self.text = text
self.keyword = keyword
self.isend = isend
self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
self.is_primary = keyword in ["for", "if", "while", "try", "with"]
self.nodes = []
if self.isend:
self._declared_identifiers = []
@ -92,9 +108,9 @@ class ControlLine(Node):
for this ControlLine"""
return keyword in {
'if':set(['else', 'elif']),
'try':set(['except', 'finally']),
'for':set(['else'])
"if": set(["else", "elif"]),
"try": set(["except", "finally"]),
"for": set(["else"]),
}.get(self.keyword, [])
def __repr__(self):
@ -102,10 +118,12 @@ class ControlLine(Node):
self.keyword,
self.text,
self.isend,
(self.lineno, self.pos)
(self.lineno, self.pos),
)
class Text(Node):
"""defines plain text in the template."""
def __init__(self, content, **kwargs):
@ -115,7 +133,9 @@ class Text(Node):
def __repr__(self):
return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
class Code(Node):
"""defines a Python code block, either inline or module level.
e.g.::
@ -148,10 +168,12 @@ class Code(Node):
return "Code(%r, %r, %r)" % (
self.text,
self.ismodule,
(self.lineno, self.pos)
(self.lineno, self.pos),
)
class Comment(Node):
"""defines a comment line.
# this is a comment
@ -165,7 +187,9 @@ class Comment(Node):
def __repr__(self):
return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
class Expression(Node):
"""defines an inline expression.
${x+y}
@ -194,41 +218,47 @@ class Expression(Node):
return "Expression(%r, %r, %r)" % (
self.text,
self.escapes_code.args,
(self.lineno, self.pos)
(self.lineno, self.pos),
)
class _TagMeta(type):
"""metaclass to allow Tag to produce a subclass according to
its keyword"""
_classmap = {}
def __init__(cls, clsname, bases, dict):
if getattr(cls, '__keyword__', None) is not None:
def __init__(cls, clsname, bases, dict_):
if getattr(cls, "__keyword__", None) is not None:
cls._classmap[cls.__keyword__] = cls
super(_TagMeta, cls).__init__(clsname, bases, dict)
super(_TagMeta, cls).__init__(clsname, bases, dict_)
def __call__(cls, keyword, attributes, **kwargs):
if ":" in keyword:
ns, defname = keyword.split(':')
return type.__call__(CallNamespaceTag, ns, defname,
attributes, **kwargs)
ns, defname = keyword.split(":")
return type.__call__(
CallNamespaceTag, ns, defname, attributes, **kwargs
)
try:
cls = _TagMeta._classmap[keyword]
except KeyError:
raise exceptions.CompileException(
"No such tag: '%s'" % keyword,
source=kwargs['source'],
lineno=kwargs['lineno'],
pos=kwargs['pos'],
filename=kwargs['filename']
source=kwargs["source"],
lineno=kwargs["lineno"],
pos=kwargs["pos"],
filename=kwargs["filename"],
)
return type.__call__(cls, keyword, attributes, **kwargs)
class Tag(compat.with_metaclass(_TagMeta, Node)):
"""abstract base class for tags.
e.g.::
<%sometag/>
<%someothertag>
@ -236,11 +266,19 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
</%someothertag>
"""
__keyword__ = None
def __init__(self, keyword, attributes, expressions,
nonexpressions, required, **kwargs):
"""construct a new Tag instance.
def __init__(
self,
keyword,
attributes,
expressions,
nonexpressions,
required,
**kwargs
):
r"""construct a new Tag instance.
this constructor not called directly, and is only called
by subclasses.
@ -266,9 +304,10 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
missing = [r for r in required if r not in self.parsed_attributes]
if len(missing):
raise exceptions.CompileException(
"Missing attribute(s): %s" %
",".join([repr(m) for m in missing]),
**self.exception_kwargs)
"Missing attribute(s): %s"
% ",".join([repr(m) for m in missing]),
**self.exception_kwargs
)
self.parent = None
self.nodes = []
@ -284,36 +323,40 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
for key in self.attributes:
if key in expressions:
expr = []
for x in re.compile(r'(\${.+?})',
re.S).split(self.attributes[key]):
m = re.compile(r'^\${(.+?)}$', re.S).match(x)
for x in re.compile(r"(\${.+?})", re.S).split(
self.attributes[key]
):
m = re.compile(r"^\${(.+?)}$", re.S).match(x)
if m:
code = ast.PythonCode(m.group(1).rstrip(),
**self.exception_kwargs)
code = ast.PythonCode(
m.group(1).rstrip(), **self.exception_kwargs
)
# we aren't discarding "declared_identifiers" here,
# which we do so that list comprehension-declared
# variables aren't counted. As yet can't find a
# condition that requires it here.
undeclared_identifiers = \
undeclared_identifiers.union(
code.undeclared_identifiers)
expr.append('(%s)' % m.group(1))
undeclared_identifiers = undeclared_identifiers.union(
code.undeclared_identifiers
)
expr.append("(%s)" % m.group(1))
else:
if x:
expr.append(repr(x))
self.parsed_attributes[key] = " + ".join(expr) or repr('')
self.parsed_attributes[key] = " + ".join(expr) or repr("")
elif key in nonexpressions:
if re.search(r'\${.+?}', self.attributes[key]):
if re.search(r"\${.+?}", self.attributes[key]):
raise exceptions.CompileException(
"Attibute '%s' in tag '%s' does not allow embedded "
"expressions" % (key, self.keyword),
**self.exception_kwargs)
**self.exception_kwargs
)
self.parsed_attributes[key] = repr(self.attributes[key])
else:
raise exceptions.CompileException(
"Invalid attribute for tag '%s': '%s'" %
(self.keyword, key),
**self.exception_kwargs)
"Invalid attribute for tag '%s': '%s'"
% (self.keyword, key),
**self.exception_kwargs
)
self.expression_undeclared_identifiers = undeclared_identifiers
def declared_identifiers(self):
@ -323,54 +366,64 @@ class Tag(compat.with_metaclass(_TagMeta, Node)):
return self.expression_undeclared_identifiers
def __repr__(self):
return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
return "%s(%r, %s, %r, %r)" % (
self.__class__.__name__,
self.keyword,
util.sorted_dict_repr(self.attributes),
(self.lineno, self.pos),
self.nodes
self.nodes,
)
class IncludeTag(Tag):
__keyword__ = 'include'
__keyword__ = "include"
def __init__(self, keyword, attributes, **kwargs):
super(IncludeTag, self).__init__(
keyword,
attributes,
('file', 'import', 'args'),
(), ('file',), **kwargs)
("file", "import", "args"),
(),
("file",),
**kwargs
)
self.page_args = ast.PythonCode(
"__DUMMY(%s)" % attributes.get('args', ''),
**self.exception_kwargs)
"__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return []
def undeclared_identifiers(self):
identifiers = self.page_args.undeclared_identifiers.\
difference(set(["__DUMMY"])).\
difference(self.page_args.declared_identifiers)
return identifiers.union(super(IncludeTag, self).
undeclared_identifiers())
identifiers = self.page_args.undeclared_identifiers.difference(
set(["__DUMMY"])
).difference(self.page_args.declared_identifiers)
return identifiers.union(
super(IncludeTag, self).undeclared_identifiers()
)
class NamespaceTag(Tag):
__keyword__ = 'namespace'
__keyword__ = "namespace"
def __init__(self, keyword, attributes, **kwargs):
super(NamespaceTag, self).__init__(
keyword, attributes,
('file',),
('name','inheritable',
'import','module'),
(), **kwargs)
keyword,
attributes,
("file",),
("name", "inheritable", "import", "module"),
(),
**kwargs
)
self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
if not 'name' in attributes and not 'import' in attributes:
self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self))))
if "name" not in attributes and "import" not in attributes:
raise exceptions.CompileException(
"'name' and/or 'import' attributes are required "
"for <%namespace>",
**self.exception_kwargs)
if 'file' in attributes and 'module' in attributes:
**self.exception_kwargs
)
if "file" in attributes and "module" in attributes:
raise exceptions.CompileException(
"<%namespace> may only have one of 'file' or 'module'",
**self.exception_kwargs
@ -379,52 +432,53 @@ class NamespaceTag(Tag):
def declared_identifiers(self):
return []
class TextTag(Tag):
__keyword__ = 'text'
__keyword__ = "text"
def __init__(self, keyword, attributes, **kwargs):
super(TextTag, self).__init__(
keyword,
attributes, (),
('filter'), (), **kwargs)
keyword, attributes, (), ("filter"), (), **kwargs
)
self.filter_args = ast.ArgumentList(
attributes.get('filter', ''),
**self.exception_kwargs)
def undeclared_identifiers(self):
return self.filter_args.\
undeclared_identifiers.\
difference(filters.DEFAULT_ESCAPES.keys()).union(
self.expression_undeclared_identifiers
attributes.get("filter", ""), **self.exception_kwargs
)
def undeclared_identifiers(self):
return self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
).union(self.expression_undeclared_identifiers)
class DefTag(Tag):
__keyword__ = 'def'
__keyword__ = "def"
def __init__(self, keyword, attributes, **kwargs):
expressions = ['buffered', 'cached'] + [
c for c in attributes if c.startswith('cache_')]
expressions = ["buffered", "cached"] + [
c for c in attributes if c.startswith("cache_")
]
super(DefTag, self).__init__(
keyword,
attributes,
expressions,
('name', 'filter', 'decorator'),
('name',),
**kwargs)
name = attributes['name']
if re.match(r'^[\w_]+$', name):
("name", "filter", "decorator"),
("name",),
**kwargs
)
name = attributes["name"]
if re.match(r"^[\w_]+$", name):
raise exceptions.CompileException(
"Missing parenthesis in %def",
**self.exception_kwargs)
self.function_decl = ast.FunctionDecl("def " + name + ":pass",
**self.exception_kwargs)
"Missing parenthesis in %def", **self.exception_kwargs
)
self.function_decl = ast.FunctionDecl(
"def " + name + ":pass", **self.exception_kwargs
)
self.name = self.function_decl.funcname
self.decorator = attributes.get('decorator', '')
self.decorator = attributes.get("decorator", "")
self.filter_args = ast.ArgumentList(
attributes.get('filter', ''),
**self.exception_kwargs)
attributes.get("filter", ""), **self.exception_kwargs
)
is_anonymous = False
is_block = False
@ -442,51 +496,58 @@ class DefTag(Tag):
def undeclared_identifiers(self):
res = []
for c in self.function_decl.defaults:
res += list(ast.PythonCode(c, **self.exception_kwargs).
undeclared_identifiers)
return set(res).union(
self.filter_args.\
undeclared_identifiers.\
difference(filters.DEFAULT_ESCAPES.keys())
).union(
self.expression_undeclared_identifiers
).difference(
self.function_decl.allargnames
res += list(
ast.PythonCode(
c, **self.exception_kwargs
).undeclared_identifiers
)
return (
set(res)
.union(
self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
)
)
.union(self.expression_undeclared_identifiers)
.difference(self.function_decl.allargnames)
)
class BlockTag(Tag):
__keyword__ = 'block'
__keyword__ = "block"
def __init__(self, keyword, attributes, **kwargs):
expressions = ['buffered', 'cached', 'args'] + [
c for c in attributes if c.startswith('cache_')]
expressions = ["buffered", "cached", "args"] + [
c for c in attributes if c.startswith("cache_")
]
super(BlockTag, self).__init__(
keyword,
attributes,
expressions,
('name','filter', 'decorator'),
("name", "filter", "decorator"),
(),
**kwargs)
name = attributes.get('name')
if name and not re.match(r'^[\w_]+$',name):
**kwargs
)
name = attributes.get("name")
if name and not re.match(r"^[\w_]+$", name):
raise exceptions.CompileException(
"%block may not specify an argument signature",
**self.exception_kwargs)
if not name and attributes.get('args', None):
raise exceptions.CompileException(
"Only named %blocks may specify args",
**self.exception_kwargs
)
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
**self.exception_kwargs)
if not name and attributes.get("args", None):
raise exceptions.CompileException(
"Only named %blocks may specify args", **self.exception_kwargs
)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
self.name = name
self.decorator = attributes.get('decorator', '')
self.decorator = attributes.get("decorator", "")
self.filter_args = ast.ArgumentList(
attributes.get('filter', ''),
**self.exception_kwargs)
attributes.get("filter", ""), **self.exception_kwargs
)
is_block = True
@ -496,7 +557,7 @@ class BlockTag(Tag):
@property
def funcname(self):
return self.name or "__M_anon_%d" % (self.lineno, )
return self.name or "__M_anon_%d" % (self.lineno,)
def get_argument_expressions(self, **kw):
return self.body_decl.get_argument_expressions(**kw)
@ -505,90 +566,100 @@ class BlockTag(Tag):
return self.body_decl.allargnames
def undeclared_identifiers(self):
return (self.filter_args.\
undeclared_identifiers.\
difference(filters.DEFAULT_ESCAPES.keys())
return (
self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
)
).union(self.expression_undeclared_identifiers)
class CallTag(Tag):
__keyword__ = 'call'
__keyword__ = "call"
def __init__(self, keyword, attributes, **kwargs):
super(CallTag, self).__init__(keyword, attributes,
('args'), ('expr',), ('expr',), **kwargs)
self.expression = attributes['expr']
super(CallTag, self).__init__(
keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs
)
self.expression = attributes["expr"]
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
**self.exception_kwargs)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.code.declared_identifiers.union(self.body_decl.allargnames)
def undeclared_identifiers(self):
return self.code.undeclared_identifiers.\
difference(self.code.declared_identifiers)
return self.code.undeclared_identifiers.difference(
self.code.declared_identifiers
)
class CallNamespaceTag(Tag):
def __init__(self, namespace, defname, attributes, **kwargs):
super(CallNamespaceTag, self).__init__(
namespace + ":" + defname,
attributes,
tuple(attributes.keys()) + ('args', ),
tuple(attributes.keys()) + ("args",),
(),
(),
**kwargs)
**kwargs
)
self.expression = "%s.%s(%s)" % (
namespace,
defname,
",".join(["%s=%s" % (k, v) for k, v in
self.parsed_attributes.items()
if k != 'args'])
",".join(
[
"%s=%s" % (k, v)
for k, v in self.parsed_attributes.items()
if k != "args"
]
),
)
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
self.body_decl = ast.FunctionArgs(
attributes.get('args', ''),
**self.exception_kwargs)
attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.code.declared_identifiers.union(self.body_decl.allargnames)
def undeclared_identifiers(self):
return self.code.undeclared_identifiers.\
difference(self.code.declared_identifiers)
return self.code.undeclared_identifiers.difference(
self.code.declared_identifiers
)
class InheritTag(Tag):
__keyword__ = 'inherit'
__keyword__ = "inherit"
def __init__(self, keyword, attributes, **kwargs):
super(InheritTag, self).__init__(
keyword, attributes,
('file',), (), ('file',), **kwargs)
keyword, attributes, ("file",), (), ("file",), **kwargs
)
class PageTag(Tag):
__keyword__ = 'page'
__keyword__ = "page"
def __init__(self, keyword, attributes, **kwargs):
expressions = ['cached', 'args', 'expression_filter', 'enable_loop'] + [
c for c in attributes if c.startswith('cache_')]
expressions = [
"cached",
"args",
"expression_filter",
"enable_loop",
] + [c for c in attributes if c.startswith("cache_")]
super(PageTag, self).__init__(
keyword,
attributes,
expressions,
(),
(),
**kwargs)
self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
**self.exception_kwargs)
keyword, attributes, expressions, (), (), **kwargs
)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
self.filter_args = ast.ArgumentList(
attributes.get('expression_filter', ''),
**self.exception_kwargs)
attributes.get("expression_filter", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.body_decl.allargnames
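# A hedged illustration of the node classes above as produced by the lexer for
# a tiny made-up template; the exact split of Text nodes may vary.
from mako.lexer import Lexer

tree = Lexer(
    "<%page args='x'/>\nHello ${x | h}\n% if x:\nyes\n% endif\n"
).parse()
print([type(n).__name__ for n in tree.nodes])
# roughly: ['PageTag', 'Text', 'Expression', 'Text', 'ControlLine', 'Text',
#           'ControlLine']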

View file

@ -1,5 +1,5 @@
# mako/pygen.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -7,8 +7,10 @@
"""utilities for generating and formatting literal Python code."""
import re
from mako import exceptions
class PythonPrinter(object):
def __init__(self, stream):
# indentation counter
@ -52,14 +54,16 @@ class PythonPrinter(object):
self.stream.write("\n" * num)
self._update_lineno(num)
def write_indented_block(self, block):
def write_indented_block(self, block, starting_lineno=None):
"""print a line or lines of python which already contain indentation.
The indentation of the total block of lines will be adjusted to that of
the current indent level."""
self.in_indent_lines = False
for l in re.split(r'\r?\n', block):
for i, l in enumerate(re.split(r"\r?\n", block)):
self.line_buffer.append(l)
if starting_lineno is not None:
self.start_source(starting_lineno + i)
self._update_lineno(1)
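# A hedged usage sketch of PythonPrinter: writeline() tracks indentation from
# the code it emits, and writeline(None) closes an indentation level; the
# generated fragment is illustrative only.
import io
from mako.pygen import PythonPrinter

buf = io.StringIO()
printer = PythonPrinter(buf)
printer.writeline("if x:")
printer.writeline("return 1")
printer.writeline(None)   # pop the indent opened by "if x:"
print(buf.getvalue())     # expected: "if x:\n    return 1\n"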
def writelines(self, *lines):
@ -80,20 +84,19 @@ class PythonPrinter(object):
self._flush_adjusted_lines()
self.in_indent_lines = True
if (line is None or
re.match(r"^\s*#",line) or
re.match(r"^\s*$", line)
if (
line is None
or re.match(r"^\s*#", line)
or re.match(r"^\s*$", line)
):
hastext = False
else:
hastext = True
is_comment = line and len(line) and line[0] == '#'
is_comment = line and len(line) and line[0] == "#"
# see if this line should decrease the indentation level
if (not is_comment and
(not hastext or self._is_unindentor(line))
):
if not is_comment and (not hastext or self._is_unindentor(line)):
if self.indent > 0:
self.indent -= 1
@ -102,7 +105,8 @@ class PythonPrinter(object):
# module wont compile.
if len(self.indent_detail) == 0:
raise exceptions.SyntaxException(
"Too many whitespace closures")
"Too many whitespace closures"
)
self.indent_detail.pop()
if line is None:
@ -132,8 +136,9 @@ class PythonPrinter(object):
# its not a "compound" keyword. but lets also
# test for valid Python keywords that might be indenting us,
# else assume its a non-indenting line
m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
line)
m2 = re.match(
r"^\s*(def|class|else|elif|except|finally)", line
)
if m2:
self.indent += 1
self.indent_detail.append(indentor)
@ -172,27 +177,28 @@ class PythonPrinter(object):
# should we decide that its not good enough, heres
# more stuff to check.
#keyword = match.group(1)
# keyword = match.group(1)
# match the original indent keyword
#for crit in [
# for crit in [
# (r'if|elif', r'else|elif'),
# (r'try', r'except|finally|else'),
# (r'while|for', r'else'),
#]:
# ]:
# if re.match(crit[0], indentor) and re.match(crit[1], keyword):
# return True
#return False
# return False
def _indent_line(self, line, stripspace=''):
def _indent_line(self, line, stripspace=""):
"""indent the given line according to the current indent level.
stripspace is a string of space that will be truncated from the
start of the line before indenting."""
return re.sub(r"^%s" % stripspace, self.indentstring
* self.indent, line)
return re.sub(
r"^%s" % stripspace, self.indentstring * self.indent, line
)
def _reset_multi_line_flags(self):
"""reset the flags which would indicate we are in a backslashed
@ -210,7 +216,7 @@ class PythonPrinter(object):
# a literal multiline string with unfortunately placed
# whitespace
current_state = (self.backslashed or self.triplequoted)
current_state = self.backslashed or self.triplequoted
if re.search(r"\\$", line):
self.backslashed = True
@ -247,7 +253,7 @@ def adjust_whitespace(text):
(backslashed, triplequoted) = (0, 1)
def in_multi_line(line):
start_state = (state[backslashed] or state[triplequoted])
start_state = state[backslashed] or state[triplequoted]
if re.search(r"\\$", line):
state[backslashed] = True
@ -257,7 +263,7 @@ def adjust_whitespace(text):
def match(reg, t):
m = re.match(reg, t)
if m:
return m, t[len(m.group(0)):]
return m, t[len(m.group(0)) :]
else:
return None, t
@ -269,7 +275,7 @@ def adjust_whitespace(text):
else:
m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
else:
m, line = match(r'#', line)
m, line = match(r"#", line)
if m:
return start_state
@ -282,13 +288,13 @@ def adjust_whitespace(text):
return start_state
def _indent_line(line, stripspace=''):
return re.sub(r"^%s" % stripspace, '', line)
def _indent_line(line, stripspace=""):
return re.sub(r"^%s" % stripspace, "", line)
lines = []
stripspace = None
for line in re.split(r'\r?\n', text):
for line in re.split(r"\r?\n", text):
if in_multi_line(line):
lines.append(line)
else:

View file

@ -1,5 +1,5 @@
# mako/pyparser.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -10,46 +10,52 @@ Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
module is used.
"""
from mako import exceptions, util, compat
from mako.compat import arg_stringname
import operator
import _ast
from mako import _ast_util
from mako import compat
from mako import exceptions
from mako import util
from mako.compat import arg_stringname
if compat.py3k:
# words that cannot be assigned to (notably
# smaller than the total keys in __builtins__)
reserved = set(['True', 'False', 'None', 'print'])
reserved = set(["True", "False", "None", "print"])
# the "id" attribute on a function node
arg_id = operator.attrgetter('arg')
arg_id = operator.attrgetter("arg")
else:
# words that cannot be assigned to (notably
# smaller than the total keys in __builtins__)
reserved = set(['True', 'False', 'None'])
reserved = set(["True", "False", "None"])
# the "id" attribute on a function node
arg_id = operator.attrgetter('id')
arg_id = operator.attrgetter("id")
import _ast
util.restore__ast(_ast)
from mako import _ast_util
def parse(code, mode='exec', **exception_kwargs):
def parse(code, mode="exec", **exception_kwargs):
"""Parse an expression into AST"""
try:
return _ast_util.parse(code, '<unknown>', mode)
return _ast_util.parse(code, "<unknown>", mode)
except Exception:
raise exceptions.SyntaxException(
"(%s) %s (%r)" % (
"(%s) %s (%r)"
% (
compat.exception_as().__class__.__name__,
compat.exception_as(),
code[0:50]
), **exception_kwargs)
code[0:50],
),
**exception_kwargs
)
class FindIdentifiers(_ast_util.NodeVisitor):
def __init__(self, listener, **exception_kwargs):
self.in_function = False
self.in_assign_targets = False
@ -119,9 +125,9 @@ class FindIdentifiers(_ast_util.NodeVisitor):
self.in_function = True
local_ident_stack = self.local_ident_stack
self.local_ident_stack = local_ident_stack.union([
arg_id(arg) for arg in self._expand_tuples(node.args.args)
])
self.local_ident_stack = local_ident_stack.union(
[arg_id(arg) for arg in self._expand_tuples(node.args.args)]
)
if islambda:
self.visit(node.body)
else:
@ -146,9 +152,11 @@ class FindIdentifiers(_ast_util.NodeVisitor):
# this is equivalent to visit_AssName in
# compiler
self._add_declared(node.id)
elif node.id not in reserved and node.id \
not in self.listener.declared_identifiers and node.id \
not in self.local_ident_stack:
elif (
node.id not in reserved
and node.id not in self.listener.declared_identifiers
and node.id not in self.local_ident_stack
):
self.listener.undeclared_identifiers.add(node.id)
def visit_Import(self, node):
@ -156,24 +164,25 @@ class FindIdentifiers(_ast_util.NodeVisitor):
if name.asname is not None:
self._add_declared(name.asname)
else:
self._add_declared(name.name.split('.')[0])
self._add_declared(name.name.split(".")[0])
def visit_ImportFrom(self, node):
for name in node.names:
if name.asname is not None:
self._add_declared(name.asname)
else:
if name.name == '*':
if name.name == "*":
raise exceptions.CompileException(
"'import *' is not supported, since all identifier "
"names must be explicitly declared. Please use the "
"form 'from <modulename> import <name1>, <name2>, "
"...' instead.", **self.exception_kwargs)
"...' instead.",
**self.exception_kwargs
)
self._add_declared(name.name)
class FindTuple(_ast_util.NodeVisitor):
def __init__(self, listener, code_factory, **exception_kwargs):
self.listener = listener
self.exception_kwargs = exception_kwargs
@ -184,16 +193,17 @@ class FindTuple(_ast_util.NodeVisitor):
p = self.code_factory(n, **self.exception_kwargs)
self.listener.codeargs.append(p)
self.listener.args.append(ExpressionGenerator(n).value())
self.listener.declared_identifiers = \
self.listener.declared_identifiers.union(
p.declared_identifiers)
self.listener.undeclared_identifiers = \
self.listener.undeclared_identifiers.union(
p.undeclared_identifiers)
ldi = self.listener.declared_identifiers
self.listener.declared_identifiers = ldi.union(
p.declared_identifiers
)
lui = self.listener.undeclared_identifiers
self.listener.undeclared_identifiers = lui.union(
p.undeclared_identifiers
)
class ParseFunc(_ast_util.NodeVisitor):
def __init__(self, listener, **exception_kwargs):
self.listener = listener
self.exception_kwargs = exception_kwargs
@ -222,11 +232,11 @@ class ParseFunc(_ast_util.NodeVisitor):
self.listener.varargs = node.args.vararg
self.listener.kwargs = node.args.kwarg
class ExpressionGenerator(object):
class ExpressionGenerator(object):
def __init__(self, astnode):
self.generator = _ast_util.SourceGenerator(' ' * 4)
self.generator = _ast_util.SourceGenerator(" " * 4)
self.generator.visit(astnode)
def value(self):
return ''.join(self.generator.result)
return "".join(self.generator.result)

View file

@ -1,5 +1,5 @@
# mako/runtime.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -7,12 +7,17 @@
"""provides runtime services for templates, including Context,
Namespace, and various helper functions."""
from mako import exceptions, util, compat
from mako.compat import compat_builtins
import functools
import sys
from mako import compat
from mako import exceptions
from mako import util
from mako.compat import compat_builtins
class Context(object):
"""Provides runtime namespace, output buffer, and various
callstacks for templates.
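The Context above is what a template's render callable writes into. Rendering into an explicit Context with your own buffer is a supported pattern; a minimal sketch:

from io import StringIO

from mako.runtime import Context
from mako.template import Template

buf = StringIO()
ctx = Context(buf, name="mako")               # keyword args become the template data
Template("hello ${name}").render_context(ctx)
print(buf.getvalue())                         # hello mako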
@ -33,18 +38,19 @@ class Context(object):
# "capture" function which proxies to the
# generic "capture" function
self._data['capture'] = compat.partial(capture, self)
self._data["capture"] = functools.partial(capture, self)
# "caller" stack used by def calls with content
self.caller_stack = self._data['caller'] = CallerStack()
self.caller_stack = self._data["caller"] = CallerStack()
def _set_with_template(self, t):
self._with_template = t
illegal_names = t.reserved_names.intersection(self._data)
if illegal_names:
raise exceptions.NameConflictError(
"Reserved words passed to render(): %s" %
", ".join(illegal_names))
"Reserved words passed to render(): %s"
% ", ".join(illegal_names)
)
@property
def lookup(self):
@ -80,7 +86,6 @@ class Context(object):
"""Push a ``caller`` callable onto the callstack for
this :class:`.Context`."""
self.caller_stack.append(caller)
def pop_caller(self):
@ -177,11 +182,12 @@ class Context(object):
c = self._copy()
x = c._data
x.pop('self', None)
x.pop('parent', None)
x.pop('next', None)
x.pop("self", None)
x.pop("parent", None)
x.pop("next", None)
return c
class CallerStack(list):
def __init__(self):
self.nextcaller = None
@ -211,6 +217,7 @@ class CallerStack(list):
class Undefined(object):
"""Represents an undefined value in a template.
All template modules have a constant value
@ -218,6 +225,7 @@ class Undefined(object):
object.
"""
def __str__(self):
raise NameError("Undefined")
@ -227,9 +235,13 @@ class Undefined(object):
def __bool__(self):
return False
UNDEFINED = Undefined()
STOP_RENDERING = ""
class LoopStack(object):
"""a stack for LoopContexts that implements the context manager protocol
to automatically pop off the top of the stack on context exit
"""
@ -269,6 +281,7 @@ class LoopStack(object):
class LoopContext(object):
"""A magic loop variable.
Automatically accessible in any ``% for`` block.
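A quick reminder of what that magic variable looks like from the template side (enable_loop is on by default):

from mako.template import Template

t = Template(
    "% for name in names:\n"
    "${loop.index}: ${name}\n"
    "% endfor\n"
)
print(t.render(names=["a", "b", "c"]))   # 0: a / 1: b / 2: c, one per line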
@ -336,6 +349,7 @@ class LoopContext(object):
class _NSAttr(object):
def __init__(self, parent):
self.__parent = parent
def __getattr__(self, key):
ns = self.__parent
while ns:
@ -345,7 +359,9 @@ class _NSAttr(object):
ns = ns.inherits
raise AttributeError(key)
class Namespace(object):
"""Provides access to collections of rendering methods, which
can be local, from other templates, or from imported modules.
@ -361,9 +377,15 @@ class Namespace(object):
"""
def __init__(self, name, context,
callables=None, inherits=None,
populate_self=True, calling_uri=None):
def __init__(
self,
name,
context,
callables=None,
inherits=None,
populate_self=True,
calling_uri=None,
):
self.name = name
self.context = context
self.inherits = inherits
@ -461,9 +483,12 @@ class Namespace(object):
if key in self.context.namespaces:
return self.context.namespaces[key]
else:
ns = TemplateNamespace(uri, self.context._copy(),
ns = TemplateNamespace(
uri,
self.context._copy(),
templateuri=uri,
calling_uri=self._templateuri)
calling_uri=self._templateuri,
)
self.context.namespaces[key] = ns
return ns
@ -506,7 +531,7 @@ class Namespace(object):
def _populate(self, d, l):
for ident in l:
if ident == '*':
if ident == "*":
for (k, v) in self._get_star():
d[k] = v
else:
@ -524,17 +549,27 @@ class Namespace(object):
val = getattr(self.inherits, key)
else:
raise AttributeError(
"Namespace '%s' has no member '%s'" %
(self.name, key))
"Namespace '%s' has no member '%s'" % (self.name, key)
)
setattr(self, key, val)
return val
class TemplateNamespace(Namespace):
"""A :class:`.Namespace` specific to a :class:`.Template` instance."""
def __init__(self, name, context, template=None, templateuri=None,
callables=None, inherits=None,
populate_self=True, calling_uri=None):
def __init__(
self,
name,
context,
template=None,
templateuri=None,
callables=None,
inherits=None,
populate_self=True,
calling_uri=None,
):
self.name = name
self.context = context
self.inherits = inherits
@ -542,8 +577,7 @@ class TemplateNamespace(Namespace):
self.callables = dict([(c.__name__, c) for c in callables])
if templateuri is not None:
self.template = _lookup_template(context, templateuri,
calling_uri)
self.template = _lookup_template(context, templateuri, calling_uri)
self._templateuri = self.template.module._template_uri
elif template is not None:
self.template = template
@ -552,9 +586,9 @@ class TemplateNamespace(Namespace):
raise TypeError("'template' argument is required.")
if populate_self:
lclcallable, lclcontext = \
_populate_self_namespace(context, self.template,
self_ns=self)
lclcallable, lclcontext = _populate_self_namespace(
context, self.template, self_ns=self
)
@property
def module(self):
@ -589,9 +623,11 @@ class TemplateNamespace(Namespace):
if self.callables:
for key in self.callables:
yield (key, self.callables[key])
def get(key):
callable_ = self.template._get_def_callable(key)
return compat.partial(callable_, self.context)
return functools.partial(callable_, self.context)
for k in self.template.module._exports:
yield (k, get(k))
@ -600,23 +636,32 @@ class TemplateNamespace(Namespace):
val = self.callables[key]
elif self.template.has_def(key):
callable_ = self.template._get_def_callable(key)
val = compat.partial(callable_, self.context)
val = functools.partial(callable_, self.context)
elif self.inherits:
val = getattr(self.inherits, key)
else:
raise AttributeError(
"Namespace '%s' has no member '%s'" %
(self.name, key))
"Namespace '%s' has no member '%s'" % (self.name, key)
)
setattr(self, key, val)
return val
class ModuleNamespace(Namespace):
"""A :class:`.Namespace` specific to a Python module instance."""
def __init__(self, name, context, module,
callables=None, inherits=None,
populate_self=True, calling_uri=None):
def __init__(
self,
name,
context,
module,
callables=None,
inherits=None,
populate_self=True,
calling_uri=None,
):
self.name = name
self.context = context
self.inherits = inherits
@ -624,7 +669,7 @@ class ModuleNamespace(Namespace):
self.callables = dict([(c.__name__, c) for c in callables])
mod = __import__(module)
for token in module.split('.')[1:]:
for token in module.split(".")[1:]:
mod = getattr(mod, token)
self.module = mod
@ -640,27 +685,27 @@ class ModuleNamespace(Namespace):
for key in self.callables:
yield (key, self.callables[key])
for key in dir(self.module):
if key[0] != '_':
if key[0] != "_":
callable_ = getattr(self.module, key)
if compat.callable(callable_):
yield key, compat.partial(callable_, self.context)
if callable(callable_):
yield key, functools.partial(callable_, self.context)
def __getattr__(self, key):
if key in self.callables:
val = self.callables[key]
elif hasattr(self.module, key):
callable_ = getattr(self.module, key)
val = compat.partial(callable_, self.context)
val = functools.partial(callable_, self.context)
elif self.inherits:
val = getattr(self.inherits, key)
else:
raise AttributeError(
"Namespace '%s' has no member '%s'" %
(self.name, key))
"Namespace '%s' has no member '%s'" % (self.name, key)
)
setattr(self, key, val)
return val
def supports_caller(func):
"""Apply a caller_stack compatibility decorator to a plain
Python function.
@ -675,8 +720,10 @@ def supports_caller(func):
return func(context, *args, **kwargs)
finally:
context.caller_stack._pop_frame()
return wrap_stackframe
def capture(context, callable_, *args, **kwargs):
"""Execute the given template def, capturing the output into
a buffer.
@ -685,7 +732,7 @@ def capture(context, callable_, *args, **kwargs):
"""
if not compat.callable(callable_):
if not callable(callable_):
raise exceptions.RuntimeException(
"capture() function expects a callable as "
"its argument (i.e. capture(func, *args, **kwargs))"
@ -697,37 +744,56 @@ def capture(context, callable_, *args, **kwargs):
buf = context._pop_buffer()
return buf.getvalue()
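Inside a template the same function is available pre-bound to the current context (see the functools.partial assignment in Context.__init__ above), so a def's output can be captured instead of written directly:

from mako.template import Template

t = Template(
    "<%def name='greet(name)'>Hello, ${name}!</%def>"
    "<% text = capture(greet, 'Mako') %>"
    "length=${len(text)}"
)
print(t.render())   # length=12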
def _decorate_toplevel(fn):
def decorate_render(render_fn):
def go(context, *args, **kw):
def y(*args, **kw):
return render_fn(context, *args, **kw)
try:
y.__name__ = render_fn.__name__[7:]
except TypeError:
# < Python 2.4
pass
return fn(y)(context, *args, **kw)
return go
return decorate_render
def _decorate_inline(context, fn):
def decorate_render(render_fn):
dec = fn(render_fn)
def go(*args, **kw):
return dec(context, *args, **kw)
return go
return decorate_render
def _include_file(context, uri, calling_uri, **kwargs):
"""locate the template from the given uri and include it in
the current output."""
template = _lookup_template(context, uri, calling_uri)
(callable_, ctx) = _populate_self_namespace(
context._clean_inheritance_tokens(),
template)
callable_(ctx, **_kwargs_for_include(callable_, context._data, **kwargs))
context._clean_inheritance_tokens(), template
)
kwargs = _kwargs_for_include(callable_, context._data, **kwargs)
if template.include_error_handler:
try:
callable_(ctx, **kwargs)
except Exception:
result = template.include_error_handler(ctx, compat.exception_as())
if not result:
compat.reraise(*sys.exc_info())
else:
callable_(ctx, **kwargs)
def _inherit_from(context, uri, calling_uri):
"""called by the _inherit method in template modules to set
@ -737,51 +803,60 @@ def _inherit_from(context, uri, calling_uri):
if uri is None:
return None
template = _lookup_template(context, uri, calling_uri)
self_ns = context['self']
self_ns = context["self"]
ih = self_ns
while ih.inherits is not None:
ih = ih.inherits
lclcontext = context._locals({'next': ih})
ih.inherits = TemplateNamespace("self:%s" % template.uri,
lclcontext = context._locals({"next": ih})
ih.inherits = TemplateNamespace(
"self:%s" % template.uri,
lclcontext,
template=template,
populate_self=False)
context._data['parent'] = lclcontext._data['local'] = ih.inherits
callable_ = getattr(template.module, '_mako_inherit', None)
populate_self=False,
)
context._data["parent"] = lclcontext._data["local"] = ih.inherits
callable_ = getattr(template.module, "_mako_inherit", None)
if callable_ is not None:
ret = callable_(template, lclcontext)
if ret:
return ret
gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
gen_ns = getattr(template.module, "_mako_generate_namespaces", None)
if gen_ns is not None:
gen_ns(context)
return (template.callable_, lclcontext)
def _lookup_template(context, uri, relativeto):
lookup = context._with_template.lookup
if lookup is None:
raise exceptions.TemplateLookupException(
"Template '%s' has no TemplateLookup associated" %
context._with_template.uri)
"Template '%s' has no TemplateLookup associated"
% context._with_template.uri
)
uri = lookup.adjust_uri(uri, relativeto)
try:
return lookup.get_template(uri)
except exceptions.TopLevelLookupException:
raise exceptions.TemplateLookupException(str(compat.exception_as()))
def _populate_self_namespace(context, template, self_ns=None):
if self_ns is None:
self_ns = TemplateNamespace('self:%s' % template.uri,
context, template=template,
populate_self=False)
context._data['self'] = context._data['local'] = self_ns
if hasattr(template.module, '_mako_inherit'):
self_ns = TemplateNamespace(
"self:%s" % template.uri,
context,
template=template,
populate_self=False,
)
context._data["self"] = context._data["local"] = self_ns
if hasattr(template.module, "_mako_inherit"):
ret = template.module._mako_inherit(template, context)
if ret:
return ret
return (template.callable_, context)
def _render(template, callable_, args, data, as_unicode=False):
"""create a Context and return the string
output of the given template and template callable."""
@ -794,17 +869,24 @@ def _render(template, callable_, args, data, as_unicode=False):
buf = util.FastEncodingBuffer(
as_unicode=as_unicode,
encoding=template.output_encoding,
errors=template.encoding_errors)
errors=template.encoding_errors,
)
context = Context(buf, **data)
context._outputting_as_unicode = as_unicode
context._set_with_template(template)
_render_context(template, callable_, context, *args,
**_kwargs_for_callable(callable_, data))
_render_context(
template,
callable_,
context,
*args,
**_kwargs_for_callable(callable_, data)
)
return context._pop_buffer().getvalue()
def _kwargs_for_callable(callable_, data):
argspec = compat.inspect_func_args(callable_)
argspec = compat.inspect_getargspec(callable_)
# for normal pages, **pageargs is usually present
if argspec[2]:
return data
@ -813,20 +895,23 @@ def _kwargs_for_callable(callable_, data):
namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
kwargs = {}
for arg in namedargs:
if arg != 'context' and arg in data and arg not in kwargs:
if arg != "context" and arg in data and arg not in kwargs:
kwargs[arg] = data[arg]
return kwargs
def _kwargs_for_include(callable_, data, **kwargs):
argspec = compat.inspect_func_args(callable_)
argspec = compat.inspect_getargspec(callable_)
namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None]
for arg in namedargs:
if arg != 'context' and arg in data and arg not in kwargs:
if arg != "context" and arg in data and arg not in kwargs:
kwargs[arg] = data[arg]
return kwargs
def _render_context(tmpl, callable_, context, *args, **kwargs):
import mako.template as template
# create polymorphic 'self' namespace for this
# template with possibly updated context
if not isinstance(tmpl, template.DefTemplate):
@ -838,6 +923,7 @@ def _render_context(tmpl, callable_, context, *args, **kwargs):
(inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent)
_exec_template(callable_, context, args=args, kwargs=kwargs)
def _exec_template(callable_, context, args=None, kwargs=None):
"""execute a rendering callable given the callable, a
Context, and optional explicit arguments
@ -847,8 +933,9 @@ def _exec_template(callable_, context, args=None, kwargs=None):
be interpreted here.
"""
template = context._with_template
if template is not None and \
(template.format_exceptions or template.error_handler):
if template is not None and (
template.format_exceptions or template.error_handler
):
try:
callable_(context, *args, **kwargs)
except Exception:
@ -859,6 +946,7 @@ def _exec_template(callable_, context, args=None, kwargs=None):
else:
callable_(context, *args, **kwargs)
def _render_error(template, context, error):
if template.error_handler:
result = template.error_handler(context, error)
@ -868,11 +956,15 @@ def _render_error(template, context, error):
error_template = exceptions.html_error_template()
if context._outputting_as_unicode:
context._buffer_stack[:] = [
util.FastEncodingBuffer(as_unicode=True)]
util.FastEncodingBuffer(as_unicode=True)
]
else:
context._buffer_stack[:] = [util.FastEncodingBuffer(
context._buffer_stack[:] = [
util.FastEncodingBuffer(
error_template.output_encoding,
error_template.encoding_errors)]
error_template.encoding_errors,
)
]
context._set_with_template(error_template)
error_template.render_context(context, error=error)

View file

@ -1,5 +1,5 @@
# mako/template.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@ -7,8 +7,7 @@
"""Provides the Template class, a facade for parsing, generating and executing
template strings, as well as template runtime operations."""
from mako.lexer import Lexer
from mako import runtime, util, exceptions, codegen, cache, compat
import json
import os
import re
import shutil
@ -18,9 +17,18 @@ import tempfile
import types
import weakref
from mako import cache
from mako import codegen
from mako import compat
from mako import exceptions
from mako import runtime
from mako import util
from mako.lexer import Lexer
class Template(object):
"""Represents a compiled template.
r"""Represents a compiled template.
:class:`.Template` includes a reference to the original
template source (via the :attr:`.source` attribute)
@ -108,6 +116,11 @@ class Template(object):
completes. Is used to provide custom error-rendering
functions.
.. seealso::
:paramref:`.Template.include_error_handler` - include-specific
error handler function
:param format_exceptions: if ``True``, exceptions which occur during
the render phase of this template will be caught and
formatted into an HTML error page, which then becomes the
@ -128,6 +141,16 @@ class Template(object):
import will not appear as the first executed statement in the generated
code and will therefore not have the desired effect.
:param include_error_handler: An error handler that runs when this template
is included within another one via the ``<%include>`` tag, and raises an
error. Compare to the :paramref:`.Template.error_handler` option.
.. versionadded:: 1.0.6
.. seealso::
:paramref:`.Template.error_handler` - top-level error handler function
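A minimal sketch of the new hook. Note that _include_file() in the runtime.py hunks earlier consults the handler on the *included* template, so it is configured here on the lookup that builds both templates; the sketch assumes TemplateLookup accepts and forwards include_error_handler like its other template arguments, and uses put_string only to keep everything in memory. A truthy return from the handler suppresses the re-raise.

from mako.lookup import TemplateLookup

def on_include_error(context, error):
    context.write("[include failed: %s]" % error)
    return True        # truthy return means: swallow the exception

lookup = TemplateLookup(include_error_handler=on_include_error)
lookup.put_string("broken.mako", "${this_name_is_undefined}")
lookup.put_string("page.mako", "before <%include file='broken.mako'/> after")

print(lookup.get_template("page.mako").render())
# before [include failed: Undefined] after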
:param input_encoding: Encoding of the template's source code. Can
be used in lieu of the coding comment. See
:ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
@ -214,7 +237,8 @@ class Template(object):
lexer_cls = Lexer
def __init__(self,
def __init__(
self,
text=None,
filename=None,
uri=None,
@ -222,10 +246,10 @@ class Template(object):
error_handler=None,
lookup=None,
output_encoding=None,
encoding_errors='strict',
encoding_errors="strict",
module_directory=None,
cache_args=None,
cache_impl='beaker',
cache_impl="beaker",
cache_enabled=True,
cache_type=None,
cache_dir=None,
@ -242,12 +266,14 @@ class Template(object):
future_imports=None,
enable_loop=True,
preprocessor=None,
lexer_cls=None):
lexer_cls=None,
include_error_handler=None,
):
if uri:
self.module_id = re.sub(r'\W', "_", uri)
self.module_id = re.sub(r"\W", "_", uri)
self.uri = uri
elif filename:
self.module_id = re.sub(r'\W', "_", filename)
self.module_id = re.sub(r"\W", "_", filename)
drive, path = os.path.splitdrive(filename)
path = os.path.normpath(path).replace(os.path.sep, "/")
self.uri = path
@ -261,9 +287,10 @@ class Template(object):
u_norm = os.path.normpath(u_norm)
if u_norm.startswith(".."):
raise exceptions.TemplateLookupException(
"Template uri \"%s\" is invalid - "
'Template uri "%s" is invalid - '
"it cannot be relative outside "
"of the root path." % self.uri)
"of the root path." % self.uri
)
self.input_encoding = input_encoding
self.output_encoding = output_encoding
@ -276,17 +303,18 @@ class Template(object):
if compat.py3k and disable_unicode:
raise exceptions.UnsupportedError(
"Mako for Python 3 does not "
"support disabling Unicode")
"Mako for Python 3 does not " "support disabling Unicode"
)
elif output_encoding and disable_unicode:
raise exceptions.UnsupportedError(
"output_encoding must be set to "
"None when disable_unicode is used.")
"None when disable_unicode is used."
)
if default_filters is None:
if compat.py3k or self.disable_unicode:
self.default_filters = ['str']
self.default_filters = ["str"]
else:
self.default_filters = ['unicode']
self.default_filters = ["unicode"]
else:
self.default_filters = default_filters
self.buffer_filters = buffer_filters
@ -303,7 +331,7 @@ class Template(object):
(code, module) = _compile_text(self, text, filename)
self._code = code
self._source = text
ModuleInfo(module, None, self, filename, code, text)
ModuleInfo(module, None, self, filename, code, text, uri)
elif filename is not None:
# if template filename and a module directory, load
# a filesystem-based module file, generating if needed
@ -312,8 +340,7 @@ class Template(object):
elif module_directory is not None:
path = os.path.abspath(
os.path.join(
os.path.normpath(module_directory),
u_norm + ".py"
os.path.normpath(module_directory), u_norm + ".py"
)
)
else:
@ -321,33 +348,44 @@ class Template(object):
module = self._compile_from_file(path, filename)
else:
raise exceptions.RuntimeException(
"Template requires text or filename")
"Template requires text or filename"
)
self.module = module
self.filename = filename
self.callable_ = self.module.render_body
self.format_exceptions = format_exceptions
self.error_handler = error_handler
self.include_error_handler = include_error_handler
self.lookup = lookup
self.module_directory = module_directory
self._setup_cache_args(
cache_impl, cache_enabled, cache_args,
cache_type, cache_dir, cache_url
cache_impl,
cache_enabled,
cache_args,
cache_type,
cache_dir,
cache_url,
)
@util.memoized_property
def reserved_names(self):
if self.enable_loop:
return codegen.RESERVED_NAMES
else:
return codegen.RESERVED_NAMES.difference(['loop'])
return codegen.RESERVED_NAMES.difference(["loop"])
def _setup_cache_args(self,
cache_impl, cache_enabled, cache_args,
cache_type, cache_dir, cache_url):
def _setup_cache_args(
self,
cache_impl,
cache_enabled,
cache_args,
cache_type,
cache_dir,
cache_url,
):
self.cache_impl = cache_impl
self.cache_enabled = cache_enabled
if cache_args:
@ -357,49 +395,42 @@ class Template(object):
# transfer deprecated cache_* args
if cache_type:
self.cache_args['type'] = cache_type
self.cache_args["type"] = cache_type
if cache_dir:
self.cache_args['dir'] = cache_dir
self.cache_args["dir"] = cache_dir
if cache_url:
self.cache_args['url'] = cache_url
self.cache_args["url"] = cache_url
def _compile_from_file(self, path, filename):
if path is not None:
util.verify_directory(os.path.dirname(path))
filemtime = os.stat(filename)[stat.ST_MTIME]
if not os.path.exists(path) or \
os.stat(path)[stat.ST_MTIME] < filemtime:
if (
not os.path.exists(path)
or os.stat(path)[stat.ST_MTIME] < filemtime
):
data = util.read_file(filename)
_compile_module_file(
self,
data,
filename,
path,
self.module_writer)
self, data, filename, path, self.module_writer
)
module = compat.load_module(self.module_id, path)
del sys.modules[self.module_id]
if module._magic_number != codegen.MAGIC_NUMBER:
data = util.read_file(filename)
_compile_module_file(
self,
data,
filename,
path,
self.module_writer)
self, data, filename, path, self.module_writer
)
module = compat.load_module(self.module_id, path)
del sys.modules[self.module_id]
ModuleInfo(module, path, self, filename, None, None)
ModuleInfo(module, path, self, filename, None, None, None)
else:
# template filename and no module directory, compile code
# in memory
data = util.read_file(filename)
code, module = _compile_text(
self,
data,
filename)
code, module = _compile_text(self, data, filename)
self._source = None
self._code = code
ModuleInfo(module, None, self, filename, code, None)
ModuleInfo(module, None, self, filename, code, None, None)
return module
@property
@ -420,13 +451,15 @@ class Template(object):
@property
def cache_dir(self):
return self.cache_args['dir']
return self.cache_args["dir"]
@property
def cache_url(self):
return self.cache_args['url']
return self.cache_args["url"]
@property
def cache_type(self):
return self.cache_args['type']
return self.cache_args["type"]
def render(self, *args, **data):
"""Render the output of this template as a string.
@ -445,11 +478,9 @@ class Template(object):
def render_unicode(self, *args, **data):
"""Render the output of this template as a unicode object."""
return runtime._render(self,
self.callable_,
args,
data,
as_unicode=True)
return runtime._render(
self, self.callable_, args, data, as_unicode=True
)
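The practical difference between the two render methods, sketched briefly: with an output_encoding set, render() returns encoded bytes, while render_unicode() always returns a text object.

from mako.template import Template

t = Template("Héllo ${name}", output_encoding="utf-8")
print(t.render(name="wörld"))          # b'H\xc3\xa9llo w\xc3\xb6rld'
print(t.render_unicode(name="wörld"))  # Héllo wörld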
def render_context(self, context, *args, **kwargs):
"""Render this :class:`.Template` with the given context.
@ -457,13 +488,9 @@ class Template(object):
The data is written to the context's buffer.
"""
if getattr(context, '_with_template', None) is None:
if getattr(context, "_with_template", None) is None:
context._set_with_template(self)
runtime._render_context(self,
self.callable_,
context,
*args,
**kwargs)
runtime._render_context(self, self.callable_, context, *args, **kwargs)
def has_def(self, name):
return hasattr(self.module, "render_%s" % name)
@ -473,6 +500,14 @@ class Template(object):
return DefTemplate(self, getattr(self.module, "render_%s" % name))
def list_defs(self):
"""return a list of defs in the template.
.. versionadded:: 1.0.4
"""
return [i[7:] for i in dir(self.module) if i[:7] == "render_"]
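Usage of list_defs() together with get_def(), for reference:

from mako.template import Template

t = Template(
    "<%def name='header()'>Hi</%def>"
    "<%def name='footer()'>Bye</%def>"
    "body text"
)
print(sorted(t.list_defs()))           # ['body', 'footer', 'header']
print(t.get_def("header").render())    # Hi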
def _get_def_callable(self, name):
return getattr(self.module, "render_%s" % name)
@ -480,7 +515,9 @@ class Template(object):
def last_modified(self):
return self.module._modified_time
class ModuleTemplate(Template):
"""A Template which is constructed given an existing Python module.
e.g.::
@ -497,27 +534,30 @@ class ModuleTemplate(Template):
"""
def __init__(self, module,
def __init__(
self,
module,
module_filename=None,
template=None,
template_filename=None,
module_source=None,
template_source=None,
output_encoding=None,
encoding_errors='strict',
encoding_errors="strict",
disable_unicode=False,
bytestring_passthrough=False,
format_exceptions=False,
error_handler=None,
lookup=None,
cache_args=None,
cache_impl='beaker',
cache_impl="beaker",
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
include_error_handler=None,
):
self.module_id = re.sub(r'\W', "_", module._template_uri)
self.module_id = re.sub(r"\W", "_", module._template_uri)
self.uri = module._template_uri
self.input_encoding = module._source_encoding
self.output_encoding = output_encoding
@ -528,32 +568,43 @@ class ModuleTemplate(Template):
if compat.py3k and disable_unicode:
raise exceptions.UnsupportedError(
"Mako for Python 3 does not "
"support disabling Unicode")
"Mako for Python 3 does not " "support disabling Unicode"
)
elif output_encoding and disable_unicode:
raise exceptions.UnsupportedError(
"output_encoding must be set to "
"None when disable_unicode is used.")
"None when disable_unicode is used."
)
self.module = module
self.filename = template_filename
ModuleInfo(module,
ModuleInfo(
module,
module_filename,
self,
template_filename,
module_source,
template_source)
template_source,
module._template_uri,
)
self.callable_ = self.module.render_body
self.format_exceptions = format_exceptions
self.error_handler = error_handler
self.include_error_handler = include_error_handler
self.lookup = lookup
self._setup_cache_args(
cache_impl, cache_enabled, cache_args,
cache_type, cache_dir, cache_url
cache_impl,
cache_enabled,
cache_args,
cache_type,
cache_dir,
cache_url,
)
class DefTemplate(Template):
"""A :class:`.Template` which represents a callable def in a parent
template."""
@ -565,6 +616,7 @@ class DefTemplate(Template):
self.encoding_errors = parent.encoding_errors
self.format_exceptions = parent.format_exceptions
self.error_handler = parent.error_handler
self.include_error_handler = parent.include_error_handler
self.enable_loop = parent.enable_loop
self.lookup = parent.lookup
self.bytestring_passthrough = parent.bytestring_passthrough
@ -572,26 +624,33 @@ class DefTemplate(Template):
def get_def(self, name):
return self.parent.get_def(name)
class ModuleInfo(object):
"""Stores information about a module currently loaded into
memory, provides reverse lookups of template source, module
source code based on a module's identifier.
"""
_modules = weakref.WeakValueDictionary()
def __init__(self,
def __init__(
self,
module,
module_filename,
template,
template_filename,
module_source,
template_source):
template_source,
template_uri,
):
self.module = module
self.module_filename = module_filename
self.template_filename = template_filename
self.module_source = module_source
self.template_source = template_source
self.template_uri = template_uri
self._modules[module.__name__] = template._mmarker = self
if module_filename:
self._modules[module_filename] = self
@ -599,14 +658,15 @@ class ModuleInfo(object):
@classmethod
def get_module_source_metadata(cls, module_source, full_line_map=False):
source_map = re.search(
r"__M_BEGIN_METADATA(.+?)__M_END_METADATA",
module_source, re.S).group(1)
source_map = compat.json.loads(source_map)
source_map['line_map'] = dict((int(k), int(v))
for k, v in source_map['line_map'].items())
r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S
).group(1)
source_map = json.loads(source_map)
source_map["line_map"] = dict(
(int(k), int(v)) for k, v in source_map["line_map"].items()
)
if full_line_map:
f_line_map = source_map['full_line_map'] = []
line_map = source_map['line_map']
f_line_map = source_map["full_line_map"] = []
line_map = source_map["line_map"]
curr_templ_line = 1
for mod_line in range(1, max(line_map)):
@ -625,10 +685,12 @@ class ModuleInfo(object):
@property
def source(self):
if self.template_source is not None:
if self.module._source_encoding and \
not isinstance(self.template_source, compat.text_type):
if self.module._source_encoding and not isinstance(
self.template_source, compat.text_type
):
return self.template_source.decode(
self.module._source_encoding)
self.module._source_encoding
)
else:
return self.template_source
else:
@ -638,14 +700,18 @@ class ModuleInfo(object):
else:
return data
def _compile(template, text, filename, generate_magic_comment):
lexer = template.lexer_cls(text,
lexer = template.lexer_cls(
text,
filename,
disable_unicode=template.disable_unicode,
input_encoding=template.input_encoding,
preprocessor=template.preprocessor)
preprocessor=template.preprocessor,
)
node = lexer.parse()
source = codegen.compile(node,
source = codegen.compile(
node,
template.uri,
filename,
default_filters=template.default_filters,
@ -657,30 +723,38 @@ def _compile(template, text, filename, generate_magic_comment):
disable_unicode=template.disable_unicode,
strict_undefined=template.strict_undefined,
enable_loop=template.enable_loop,
reserved_names=template.reserved_names)
reserved_names=template.reserved_names,
)
return source, lexer
def _compile_text(template, text, filename):
identifier = template.module_id
source, lexer = _compile(template, text, filename,
generate_magic_comment=template.disable_unicode)
source, lexer = _compile(
template,
text,
filename,
generate_magic_comment=template.disable_unicode,
)
cid = identifier
if not compat.py3k and isinstance(cid, compat.text_type):
cid = cid.encode()
module = types.ModuleType(cid)
code = compile(source, cid, 'exec')
code = compile(source, cid, "exec")
# this exec() works for 2.4->3.3.
exec(code, module.__dict__, module.__dict__)
return (source, module)
def _compile_module_file(template, text, filename, outputpath, module_writer):
source, lexer = _compile(template, text, filename,
generate_magic_comment=True)
source, lexer = _compile(
template, text, filename, generate_magic_comment=True
)
if isinstance(source, compat.text_type):
source = source.encode(lexer.encoding or 'ascii')
source = source.encode(lexer.encoding or "ascii")
if module_writer:
module_writer(source, outputpath)
@ -694,12 +768,13 @@ def _compile_module_file(template, text, filename, outputpath, module_writer):
os.close(dest)
shutil.move(name, outputpath)
def _get_module_info_from_callable(callable_):
if compat.py3k:
return _get_module_info(callable_.__globals__['__name__'])
return _get_module_info(callable_.__globals__["__name__"])
else:
return _get_module_info(callable_.func_globals['__name__'])
return _get_module_info(callable_.func_globals["__name__"])
def _get_module_info(filename):
return ModuleInfo._modules[filename]

View file

@ -1,15 +1,18 @@
# mako/util.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
import collections
import codecs
import os
from mako import compat
import collections
import operator
import os
import re
import timeit
from mako import compat
def update_wrapper(decorated, fn):
decorated.__wrapped__ = fn
@ -27,16 +30,16 @@ class PluginLoader(object):
return self.impls[name]()
else:
import pkg_resources
for impl in pkg_resources.iter_entry_points(
self.group,
name):
for impl in pkg_resources.iter_entry_points(self.group, name):
self.impls[name] = impl.load
return impl.load()
else:
from mako import exceptions
raise exceptions.RuntimeException(
"Can't load plugin %s %s" %
(self.group, name))
"Can't load plugin %s %s" % (self.group, name)
)
def register(self, name, modulepath, objname):
def load():
@ -44,21 +47,24 @@ class PluginLoader(object):
for token in modulepath.split(".")[1:]:
mod = getattr(mod, token)
return getattr(mod, objname)
self.impls[name] = load
def verify_directory(dir):
def verify_directory(dir_):
"""create and/or verify a filesystem directory."""
tries = 0
while not os.path.exists(dir):
while not os.path.exists(dir_):
try:
tries += 1
os.makedirs(dir, compat.octal("0775"))
os.makedirs(dir_, compat.octal("0775"))
except:
if tries > 5:
raise
def to_list(x, default=None):
if x is None:
return default
@ -69,7 +75,9 @@ def to_list(x, default=None):
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
@ -81,7 +89,9 @@ class memoized_property(object):
obj.__dict__[self.__name__] = result = self.fget(obj)
return result
class memoized_instancemethod(object):
"""Decorate a method memoize its return value.
Best applied to no-arg methods: memoization is not sensitive to
@ -89,6 +99,7 @@ class memoized_instancemethod(object):
called with different arguments.
"""
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
@ -97,19 +108,27 @@ class memoized_instancemethod(object):
def __get__(self, obj, cls):
if obj is None:
return self
def oneshot(*args, **kw):
result = self.fget(obj, *args, **kw)
memo = lambda *a, **kw: result
def memo(*a, **kw):
return result
memo.__name__ = self.__name__
memo.__doc__ = self.__doc__
obj.__dict__[self.__name__] = memo
return result
oneshot.__name__ = self.__name__
oneshot.__doc__ = self.__doc__
return oneshot
class SetLikeDict(dict):
"""a dictionary that has some setlike methods on it"""
def union(self, other):
"""produce a 'union' of this dict and another (at the key level).
@ -118,17 +137,19 @@ class SetLikeDict(dict):
x.update(other)
return x
class FastEncodingBuffer(object):
"""a very rudimentary buffer that is faster than StringIO,
but doesn't crash on unicode data like cStringIO."""
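A quick sketch of the buffer's two modes: with an encoding it joins and encodes on getvalue(), otherwise it returns the joined text.

from mako.util import FastEncodingBuffer

buf = FastEncodingBuffer(encoding="utf-8")
buf.write("héllo ")
buf.write("wörld")
print(buf.getvalue())    # b'h\xc3\xa9llo w\xc3\xb6rld'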
def __init__(self, encoding=None, errors='strict', as_unicode=False):
def __init__(self, encoding=None, errors="strict", as_unicode=False):
self.data = collections.deque()
self.encoding = encoding
if as_unicode:
self.delim = compat.u('')
self.delim = compat.u("")
else:
self.delim = ''
self.delim = ""
self.as_unicode = as_unicode
self.errors = errors
self.write = self.data.append
@ -139,12 +160,15 @@ class FastEncodingBuffer(object):
def getvalue(self):
if self.encoding:
return self.delim.join(self.data).encode(self.encoding,
self.errors)
return self.delim.join(self.data).encode(
self.encoding, self.errors
)
else:
return self.delim.join(self.data)
class LRUCache(dict):
"""A dictionary-like object that stores a limited number of items,
discarding lesser used items periodically.
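A sketch of the pruning behaviour described above: the cache is only trimmed once it grows past capacity + capacity * threshold, so its size stays bounded rather than exact.

from mako.util import LRUCache

cache = LRUCache(2, threshold=0.5)
for key in "abcde":
    cache[key] = key.upper()
print(len(cache) <= 3)    # True: trimmed back whenever the length exceeds 3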
@ -157,17 +181,18 @@ class LRUCache(dict):
def __init__(self, key, value):
self.key = key
self.value = value
self.timestamp = compat.time_func()
self.timestamp = timeit.default_timer()
def __repr__(self):
return repr(self.value)
def __init__(self, capacity, threshold=.5):
def __init__(self, capacity, threshold=0.5):
self.capacity = capacity
self.threshold = threshold
def __getitem__(self, key):
item = dict.__getitem__(self, key)
item.timestamp = compat.time_func()
item.timestamp = timeit.default_timer()
return item.value
def values(self):
@ -191,9 +216,12 @@ class LRUCache(dict):
def _manage_size(self):
while len(self) > self.capacity + self.capacity * self.threshold:
bytime = sorted(dict.values(self),
key=operator.attrgetter('timestamp'), reverse=True)
for item in bytime[self.capacity:]:
bytime = sorted(
dict.values(self),
key=operator.attrgetter("timestamp"),
reverse=True,
)
for item in bytime[self.capacity :]:
try:
del self[item.key]
except KeyError:
@ -201,10 +229,12 @@ class LRUCache(dict):
# broke in on us. loop around and try again
break
# Regexp to match python magic encoding line
_PYTHON_MAGIC_COMMENT_re = re.compile(
r'[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)',
re.VERBOSE)
r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE
)
def parse_encoding(fp):
"""Deduce the encoding of a Python source file (binary mode) from magic
@ -222,13 +252,14 @@ def parse_encoding(fp):
line1 = fp.readline()
has_bom = line1.startswith(codecs.BOM_UTF8)
if has_bom:
line1 = line1[len(codecs.BOM_UTF8):]
line1 = line1[len(codecs.BOM_UTF8) :]
m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode('ascii', 'ignore'))
m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore"))
if not m:
try:
import parser
parser.suite(line1.decode('ascii', 'ignore'))
parser.suite(line1.decode("ascii", "ignore"))
except (ImportError, SyntaxError):
# Either it's a real syntax error, in which case the source
# is not valid python source, or line2 is a continuation of
@ -238,13 +269,16 @@ def parse_encoding(fp):
else:
line2 = fp.readline()
m = _PYTHON_MAGIC_COMMENT_re.match(
line2.decode('ascii', 'ignore'))
line2.decode("ascii", "ignore")
)
if has_bom:
if m:
raise SyntaxError("python refuses to compile code with both a UTF8" \
" byte-order-mark and a magic encoding comment")
return 'utf_8'
raise SyntaxError(
"python refuses to compile code with both a UTF8"
" byte-order-mark and a magic encoding comment"
)
return "utf_8"
elif m:
return m.group(1)
else:
@ -252,6 +286,7 @@ def parse_encoding(fp):
finally:
fp.seek(pos)
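For reference, the function works on a binary file object and returns the declared encoding (or "utf_8" for a bare BOM, or None when nothing is declared):

import io

from mako.util import parse_encoding

src = b"# -*- coding: latin-1 -*-\nx = 1\n"
print(parse_encoding(io.BytesIO(src)))   # latin-1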
def sorted_dict_repr(d):
"""repr() a dictionary with the keys in order.
@ -262,14 +297,16 @@ def sorted_dict_repr(d):
keys.sort()
return "{" + ", ".join(["%r: %r" % (k, d[k]) for k in keys]) + "}"
def restore__ast(_ast):
"""Attempt to restore the required classes to the _ast module if it
appears to be missing them
"""
if hasattr(_ast, 'AST'):
if hasattr(_ast, "AST"):
return
_ast.PyCF_ONLY_AST = 2 << 9
m = compile("""\
m = compile(
"""\
def foo(): pass
class Bar(object): pass
if False: pass
@ -282,13 +319,17 @@ baz = 'mako'
baz and 'foo' or 'bar'
(mako is baz == baz) is not baz != mako
mako > baz < mako >= baz <= mako
mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
mako in baz not in mako""",
"<unknown>",
"exec",
_ast.PyCF_ONLY_AST,
)
_ast.Module = type(m)
for cls in _ast.Module.__mro__:
if cls.__name__ == 'mod':
if cls.__name__ == "mod":
_ast.mod = cls
elif cls.__name__ == 'AST':
elif cls.__name__ == "AST":
_ast.AST = cls
_ast.FunctionDef = type(m.body[0])
@ -338,8 +379,7 @@ mako in baz not in mako""", '<unknown>', 'exec', _ast.PyCF_ONLY_AST)
_ast.NotIn = type(m.body[12].value.ops[1])
def read_file(path, mode='rb'):
def read_file(path, mode="rb"):
fp = open(path, mode)
try:
data = fp.read()
@ -347,6 +387,7 @@ def read_file(path, mode='rb'):
finally:
fp.close()
def read_python_file(path):
fp = open(path, "rb")
try:
@ -357,4 +398,3 @@ def read_python_file(path):
return data
finally:
fp.close()