Diffstat (limited to 'mako/ext')
-rw-r--r--   mako/ext/autohandler.py   |  38
-rw-r--r--   mako/ext/babelplugin.py   |  28
-rw-r--r--   mako/ext/beaker_cache.py  |  32
-rw-r--r--   mako/ext/extract.py       |  47
-rw-r--r--   mako/ext/linguaplugin.py  |  52
-rw-r--r--   mako/ext/preprocessors.py |   2
-rw-r--r--   mako/ext/pygmentplugin.py | 166
-rw-r--r--   mako/ext/turbogears.py    |  17
8 files changed, 222 insertions, 160 deletions
diff --git a/mako/ext/autohandler.py b/mako/ext/autohandler.py
index 9d1c911..f262b13 100644
--- a/mako/ext/autohandler.py
+++ b/mako/ext/autohandler.py
@@ -8,29 +8,29 @@ requires that the TemplateLookup class is used with templates.
 
-usage:
+usage::
 
-<%!
-    from mako.ext.autohandler import autohandler
-%>
-<%inherit file="${autohandler(template, context)}"/>
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context)}"/>
 
-or with custom autohandler filename:
+or with custom autohandler filename::
 
-<%!
-    from mako.ext.autohandler import autohandler
-%>
-<%inherit file="${autohandler(template, context, name='somefilename')}"/>
+    <%!
+        from mako.ext.autohandler import autohandler
+    %>
+    <%inherit file="${autohandler(template, context, name='somefilename')}"/>
 
 """
 
-import posixpath
 import os
+import posixpath
 import re
 
 
-def autohandler(template, context, name='autohandler'):
+def autohandler(template, context, name="autohandler"):
     lookup = context.lookup
     _template_uri = template.module._template_uri
     if not lookup.filesystem_checks:
@@ -39,13 +39,14 @@ def autohandler(template, context, name='autohandler'):
         except KeyError:
             pass
 
-    tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name]
+    tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
     while len(tokens):
-        path = '/' + '/'.join(tokens)
+        path = "/" + "/".join(tokens)
         if path != _template_uri and _file_exists(lookup, path):
             if not lookup.filesystem_checks:
                 return lookup._uri_cache.setdefault(
-                    (autohandler, _template_uri, name), path)
+                    (autohandler, _template_uri, name), path
+                )
             else:
                 return path
         if len(tokens) == 1:
@@ -54,15 +55,16 @@
 
     if not lookup.filesystem_checks:
         return lookup._uri_cache.setdefault(
-            (autohandler, _template_uri, name), None)
+            (autohandler, _template_uri, name), None
+        )
     else:
         return None
 
 
 def _file_exists(lookup, path):
-    psub = re.sub(r'^/', '', path)
+    psub = re.sub(r"^/", "", path)
     for d in lookup.directories:
-        if os.path.exists(d + '/' + psub):
+        if os.path.exists(d + "/" + psub):
             return True
     else:
         return False
diff --git a/mako/ext/babelplugin.py b/mako/ext/babelplugin.py
index 0b5e84f..e7e93f5 100644
--- a/mako/ext/babelplugin.py
+++ b/mako/ext/babelplugin.py
@@ -6,18 +6,19 @@
 """gettext message extraction via Babel: http://babel.edgewall.org/"""
 from babel.messages.extract import extract_python
+
 from mako.ext.extract import MessageExtractor
 
 
 class BabelMakoExtractor(MessageExtractor):
-
     def __init__(self, keywords, comment_tags, options):
         self.keywords = keywords
         self.options = options
         self.config = {
-            'comment-tags': u' '.join(comment_tags),
-            'encoding': options.get('input_encoding',
-                                    options.get('encoding', None)),
+            "comment-tags": u" ".join(comment_tags),
+            "encoding": options.get(
+                "input_encoding", options.get("encoding", None)
+            ),
         }
         super(BabelMakoExtractor, self).__init__()
 
@@ -25,12 +26,19 @@ class BabelMakoExtractor(MessageExtractor):
         return self.process_file(fileobj)
 
     def process_python(self, code, code_lineno, translator_strings):
-        comment_tags = self.config['comment-tags']
-        for lineno, funcname, messages, python_translator_comments \
-                in extract_python(code,
-                                  self.keywords, comment_tags, self.options):
-            yield (code_lineno + (lineno - 1), funcname, messages,
-                   translator_strings + python_translator_comments)
+        comment_tags = self.config["comment-tags"]
+        for (
+            lineno,
+            funcname,
+            messages,
+            python_translator_comments,
+        ) in extract_python(code, self.keywords, comment_tags, self.options):
+            yield (
+                code_lineno + (lineno - 1),
+                funcname,
+                messages,
+                translator_strings + python_translator_comments,
+            )
 
 
 def extract(fileobj, keywords, comment_tags, options):
diff --git a/mako/ext/beaker_cache.py b/mako/ext/beaker_cache.py
index c7c260d..ebca8a9 100644
--- a/mako/ext/beaker_cache.py
+++ b/mako/ext/beaker_cache.py
@@ -1,7 +1,6 @@
 """Provide a :class:`.CacheImpl` for the Beaker caching system."""
 
 from mako import exceptions
-
 from mako.cache import CacheImpl
 
 try:
@@ -27,36 +26,37 @@ class BeakerCacheImpl(CacheImpl):
     def __init__(self, cache):
         if not has_beaker:
             raise exceptions.RuntimeException(
-                "Can't initialize Beaker plugin; Beaker is not installed.")
+                "Can't initialize Beaker plugin; Beaker is not installed."
+            )
         global _beaker_cache
         if _beaker_cache is None:
-            if 'manager' in cache.template.cache_args:
-                _beaker_cache = cache.template.cache_args['manager']
+            if "manager" in cache.template.cache_args:
+                _beaker_cache = cache.template.cache_args["manager"]
             else:
                 _beaker_cache = beaker_cache.CacheManager()
         super(BeakerCacheImpl, self).__init__(cache)
 
     def _get_cache(self, **kw):
-        expiretime = kw.pop('timeout', None)
-        if 'dir' in kw:
-            kw['data_dir'] = kw.pop('dir')
+        expiretime = kw.pop("timeout", None)
+        if "dir" in kw:
+            kw["data_dir"] = kw.pop("dir")
         elif self.cache.template.module_directory:
-            kw['data_dir'] = self.cache.template.module_directory
+            kw["data_dir"] = self.cache.template.module_directory
 
-        if 'manager' in kw:
-            kw.pop('manager')
+        if "manager" in kw:
+            kw.pop("manager")
 
-        if kw.get('type') == 'memcached':
-            kw['type'] = 'ext:memcached'
+        if kw.get("type") == "memcached":
+            kw["type"] = "ext:memcached"
 
-        if 'region' in kw:
-            region = kw.pop('region')
+        if "region" in kw:
+            region = kw.pop("region")
             cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
         else:
             cache = _beaker_cache.get_cache(self.cache.id, **kw)
-        cache_args = {'starttime': self.cache.starttime}
+        cache_args = {"starttime": self.cache.starttime}
         if expiretime:
-            cache_args['expiretime'] = expiretime
+            cache_args["expiretime"] = expiretime
         return cache, cache_args
 
     def get_or_create(self, key, creation_function, **kw):
diff --git a/mako/ext/extract.py b/mako/ext/extract.py
index d777ea8..766129b 100644
--- a/mako/ext/extract.py
+++ b/mako/ext/extract.py
@@ -1,30 +1,33 @@
 import re
+
 from mako import compat
 from mako import lexer
 from mako import parsetree
 
 
 class MessageExtractor(object):
-
     def process_file(self, fileobj):
         template_node = lexer.Lexer(
-            fileobj.read(),
-            input_encoding=self.config['encoding']).parse()
+            fileobj.read(), input_encoding=self.config["encoding"]
+        ).parse()
         for extracted in self.extract_nodes(template_node.get_children()):
             yield extracted
 
     def extract_nodes(self, nodes):
         translator_comments = []
         in_translator_comments = False
-        input_encoding = self.config['encoding'] or 'ascii'
+        input_encoding = self.config["encoding"] or "ascii"
         comment_tags = list(
-            filter(None, re.split(r'\s+', self.config['comment-tags'])))
+            filter(None, re.split(r"\s+", self.config["comment-tags"]))
+        )
 
         for node in nodes:
             child_nodes = None
-            if in_translator_comments and \
-                    isinstance(node, parsetree.Text) and \
-                    not node.content.strip():
+            if (
+                in_translator_comments
+                and isinstance(node, parsetree.Text)
+                and not node.content.strip()
+            ):
                 # Ignore whitespace within translator comments
                 continue
@@ -32,13 +35,15 @@ class MessageExtractor(object):
                 value = node.text.strip()
                 if in_translator_comments:
                     translator_comments.extend(
-                        self._split_comment(node.lineno, value))
+                        self._split_comment(node.lineno, value)
+                    )
                     continue
                 for comment_tag in comment_tags:
                     if value.startswith(comment_tag):
                         in_translator_comments = True
                         translator_comments.extend(
-                            self._split_comment(node.lineno, value))
+                            self._split_comment(node.lineno, value)
+                        )
                 continue
 
             if isinstance(node, parsetree.DefTag):
@@ -69,15 +74,18 @@ class MessageExtractor(object):
                     continue
 
             # Comments don't apply unless they immediately precede the message
-            if translator_comments and \
-                    translator_comments[-1][0] < node.lineno - 1:
+            if (
+                translator_comments
+                and translator_comments[-1][0] < node.lineno - 1
+            ):
                 translator_comments = []
 
             translator_strings = [
-                comment[1] for comment in translator_comments]
+                comment[1] for comment in translator_comments
+            ]
 
             if isinstance(code, compat.text_type):
-                code = code.encode(input_encoding, 'backslashreplace')
+                code = code.encode(input_encoding, "backslashreplace")
 
             used_translator_comments = False
             # We add extra newline to work around a pybabel bug
             # (see python-babel/babel#274, parse_encoding dies if the first line
             # input string of the input is non-ascii)
             # Also, because we added it, we have to subtract one from
             # node.lineno
-            code = compat.byte_buffer(compat.b('\n') + code)
+            code = compat.byte_buffer(compat.b("\n") + code)
 
             for message in self.process_python(
-                    code, node.lineno - 1, translator_strings):
+                code, node.lineno - 1, translator_strings
+            ):
                 yield message
                 used_translator_comments = True
@@ -104,5 +113,7 @@ def _split_comment(lineno, comment):
         """Return the multiline comment at lineno split into a list of
         comment line numbers and the accompanying comment line"""
-        return [(lineno + index, line) for index, line in
-                enumerate(comment.splitlines())]
+        return [
+            (lineno + index, line)
+            for index, line in enumerate(comment.splitlines())
+        ]
diff --git a/mako/ext/linguaplugin.py b/mako/ext/linguaplugin.py
index 46b0d6a..dda3422 100644
--- a/mako/ext/linguaplugin.py
+++ b/mako/ext/linguaplugin.py
@@ -1,43 +1,51 @@
 import io
+
 from lingua.extractors import Extractor
-from lingua.extractors import Message
 from lingua.extractors import get_extractor
-from mako.ext.extract import MessageExtractor
+from lingua.extractors import Message
+
 from mako import compat
+from mako.ext.extract import MessageExtractor
 
 
 class LinguaMakoExtractor(Extractor, MessageExtractor):
-    '''Mako templates'''
-    extensions = ['.mako']
-    default_config = {
-        'encoding': 'utf-8',
-        'comment-tags': '',
-    }
+    """Mako templates"""
+
+    extensions = [".mako"]
+    default_config = {"encoding": "utf-8", "comment-tags": ""}
 
     def __call__(self, filename, options, fileobj=None):
         self.options = options
         self.filename = filename
-        self.python_extractor = get_extractor('x.py')
+        self.python_extractor = get_extractor("x.py")
        if fileobj is None:
-            fileobj = open(filename, 'rb')
+            fileobj = open(filename, "rb")
         return self.process_file(fileobj)
 
     def process_python(self, code, code_lineno, translator_strings):
         source = code.getvalue().strip()
-        if source.endswith(compat.b(':')):
-            if source in (compat.b('try:'), compat.b('else:')) or source.startswith(compat.b('except')):
-                source = compat.b('') # Ignore try/except and else
-            elif source.startswith(compat.b('elif')):
-                source = source[2:] # Replace "elif" with "if"
-            source += compat.b('pass')
+        if source.endswith(compat.b(":")):
+            if source in (
+                compat.b("try:"),
+                compat.b("else:"),
+            ) or source.startswith(compat.b("except")):
+                source = compat.b("")  # Ignore try/except and else
+            elif source.startswith(compat.b("elif")):
+                source = source[2:]  # Replace "elif" with "if"
+            source += compat.b("pass")
         code = io.BytesIO(source)
         for msg in self.python_extractor(
-                self.filename, self.options, code, code_lineno -1):
+            self.filename, self.options, code, code_lineno - 1
+        ):
             if translator_strings:
-                msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural,
-                              msg.flags,
-                              compat.u(' ').join(
-                                  translator_strings + [msg.comment]),
-                              msg.tcomment, msg.location)
+                msg = Message(
+                    msg.msgctxt,
+                    msg.msgid,
+                    msg.msgid_plural,
+                    msg.flags,
+                    compat.u(" ").join(translator_strings + [msg.comment]),
+                    msg.tcomment,
+                    msg.location,
+                )
             yield msg
diff --git a/mako/ext/preprocessors.py b/mako/ext/preprocessors.py
index 9b700d1..524b87f 100644
--- a/mako/ext/preprocessors.py
+++ b/mako/ext/preprocessors.py
@@ -17,4 +17,4 @@ def convert_comments(text):
     from mako.ext.preprocessors import convert_comments
     t = Template(..., preprocessor=convert_comments)"""
-    return re.sub(r'(?<=\n)\s*#[^#]', "##", text)
+    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
diff --git a/mako/ext/pygmentplugin.py b/mako/ext/pygmentplugin.py
index 4057caa..809e696 100644
--- a/mako/ext/pygmentplugin.py
+++ b/mako/ext/pygmentplugin.py
@@ -4,42 +4,70 @@
 # This module is part of Mako and is released under
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
-from pygments.lexers.web import \
-    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
-from pygments.lexers.agile import PythonLexer, Python3Lexer
-from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
-    include, using
-from pygments.token import \
-    Text, Comment, Operator, Keyword, Name, String, Other
-from pygments.formatters.html import HtmlFormatter
 from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
 from mako import compat
 
 
 class MakoLexer(RegexLexer):
-    name = 'Mako'
-    aliases = ['mako']
-    filenames = ['*.mao']
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
 
     tokens = {
-        'root': [
-            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Keyword, Other)),
-            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
-            (r'(\s*)(##[^\n]*)(\n|\Z)',
-             bygroups(Text, Comment.Preproc, Other)),
-            (r'''(?s)<%doc>.*?</%doc>''', Comment.Preproc),
-            (r'(<%)([\w\.\:]+)',
-             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
-            (r'(</%)([\w\.\:]+)(>)',
-             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
-            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
-            (r'(<%(?:!?))(.*?)(%>)(?s)',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'(\$\{)(.*?)(\})',
-             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
-            (r'''(?sx)
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(<%(?:!?))(.*?)(%>)(?s)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"""(?sx)
             (.+?)                # anything, followed by:
             (?:
              (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
             (?=\#\*) |                # multiline comment
             (?=</?%) |                # a python block
                                       # call start or end
             (?=\$\{) |                # a substitution
             (?<=\n)(?=\s*%) |
                                       # - don't consume
@@ -52,76 +80,78 @@ class MakoLexer(RegexLexer):
             (\\\n) |                  # an escaped newline
             \Z                        # end of string
             )
-            ''', bygroups(Other, Operator)),
-            (r'\s+', Text),
+            """,
+                bygroups(Other, Operator),
+            ),
+            (r"\s+", Text),
         ],
-        'ondeftags': [
-            (r'<%', Comment.Preproc),
-            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
-            include('tag'),
+        "ondeftags": [
+            (r"<%", Comment.Preproc),
+            (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
+            include("tag"),
         ],
-        'tag': [
-            (r'((?:\w+)\s*=)\s*(".*?")',
-             bygroups(Name.Attribute, String)),
-            (r'/?\s*>', Comment.Preproc, '#pop'),
-            (r'\s+', Text),
+        "tag": [
+            (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
+            (r"/?\s*>", Comment.Preproc, "#pop"),
+            (r"\s+", Text),
         ],
-        'attr': [
-            ('".*?"', String, '#pop'),
-            ("'.*?'", String, '#pop'),
-            (r'[^\s>]+', String, '#pop'),
+        "attr": [
+            ('".*?"', String, "#pop"),
+            ("'.*?'", String, "#pop"),
+            (r"[^\s>]+", String, "#pop"),
         ],
     }
 
 
 class MakoHtmlLexer(DelegatingLexer):
-    name = 'HTML+Mako'
-    aliases = ['html+mako']
+    name = "HTML+Mako"
+    aliases = ["html+mako"]
 
     def __init__(self, **options):
-        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
-                                            **options)
+        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, **options)
 
 
 class MakoXmlLexer(DelegatingLexer):
-    name = 'XML+Mako'
-    aliases = ['xml+mako']
+    name = "XML+Mako"
+    aliases = ["xml+mako"]
 
     def __init__(self, **options):
-        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
-                                           **options)
+        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, **options)
 
 
 class MakoJavascriptLexer(DelegatingLexer):
-    name = 'JavaScript+Mako'
-    aliases = ['js+mako', 'javascript+mako']
+    name = "JavaScript+Mako"
+    aliases = ["js+mako", "javascript+mako"]
 
     def __init__(self, **options):
-        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
-                                                  MakoLexer, **options)
+        super(MakoJavascriptLexer, self).__init__(
+            JavascriptLexer, MakoLexer, **options
+        )
 
 
 class MakoCssLexer(DelegatingLexer):
-    name = 'CSS+Mako'
-    aliases = ['css+mako']
+    name = "CSS+Mako"
+    aliases = ["css+mako"]
 
     def __init__(self, **options):
-        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
-                                           **options)
+        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, **options)
 
 
-pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
-                                        linenos=True)
+pygments_html_formatter = HtmlFormatter(
+    cssclass="syntax-highlighted", linenos=True
+)
 
 
-def syntax_highlight(filename='', language=None):
+def syntax_highlight(filename="", language=None):
     mako_lexer = MakoLexer()
     if compat.py3k:
         python_lexer = Python3Lexer()
     else:
         python_lexer = PythonLexer()
-    if filename.startswith('memory:') or language == 'mako':
-        return lambda string: highlight(string, mako_lexer,
-                                        pygments_html_formatter)
-    return lambda string: highlight(string, python_lexer,
-                                    pygments_html_formatter)
+    if filename.startswith("memory:") or language == "mako":
+        return lambda string: highlight(
+            string, mako_lexer, pygments_html_formatter
+        )
+    return lambda string: highlight(
+        string, python_lexer, pygments_html_formatter
+    )
diff --git a/mako/ext/turbogears.py b/mako/ext/turbogears.py
index eaa2d78..ee1147d 100644
--- a/mako/ext/turbogears.py
+++ b/mako/ext/turbogears.py
@@ -13,7 +13,7 @@ class TGPlugin(object):
 
     """TurboGears compatible Template Plugin."""
 
-    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
+    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
         self.extra_vars_func = extra_vars_func
         self.extension = extension
         if not options:
@@ -22,9 +22,9 @@ class TGPlugin(object):
         # Pull the options out and initialize the lookup
         lookup_options = {}
         for k, v in options.items():
-            if k.startswith('mako.'):
+            if k.startswith("mako."):
                 lookup_options[k[5:]] = v
-            elif k in ['directories', 'filesystem_checks', 'module_directory']:
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
                 lookup_options[k] = v
         self.lookup = TemplateLookup(**lookup_options)
 
@@ -40,14 +40,17 @@ class TGPlugin(object):
         if template_string is not None:
             return Template(template_string, **self.tmpl_options)
         # Translate TG dot notation to normal / template path
-        if '/' not in templatename:
-            templatename = '/' + templatename.replace('.', '/') + '.' +\
-                self.extension
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
 
         # Lookup template
         return self.lookup.get_template(templatename)
 
-    def render(self, info, format="html", fragment=False, template=None):
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
         if isinstance(template, compat.string_types):
             template = self.load_template(template)