path: root/mako/lexer.py
Diffstat (limited to 'mako/lexer.py')
-rw-r--r--	mako/lexer.py	124
1 file changed, 63 insertions(+), 61 deletions(-)
diff --git a/mako/lexer.py b/mako/lexer.py
index 1dda398..2fa08e4 100644
--- a/mako/lexer.py
+++ b/mako/lexer.py
@@ -13,10 +13,12 @@ from mako.pygen import adjust_whitespace
_regexp_cache = {}
+
class Lexer(object):
+
def __init__(self, text, filename=None,
- disable_unicode=False,
- input_encoding=None, preprocessor=None):
+ disable_unicode=False,
+ input_encoding=None, preprocessor=None):
self.text = text
self.filename = filename
self.template = parsetree.TemplateNode(self.filename)
@@ -32,8 +34,8 @@ class Lexer(object):
if compat.py3k and disable_unicode:
raise exceptions.UnsupportedError(
- "Mako for Python 3 does not "
- "support disabling Unicode")
+ "Mako for Python 3 does not "
+ "support disabling Unicode")
if preprocessor is None:
self.preprocessor = []
@@ -87,9 +89,9 @@ class Lexer(object):
cp -= 1
self.matched_charpos = mp - cp
self.lineno += len(lines)
- #print "MATCHED:", match.group(0), "LINE START:",
+ # print "MATCHED:", match.group(0), "LINE START:",
# self.matched_lineno, "LINE END:", self.lineno
- #print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
+ # print "MATCH:", regexp, "\n", self.text[mp : mp + 15], \
# (match and "TRUE" or "FALSE")
return match
@@ -120,9 +122,9 @@ class Lexer(object):
brace_level -= match.group(1).count('}')
continue
raise exceptions.SyntaxException(
- "Expected: %s" %
- ','.join(text),
- **self.exception_kwargs)
+ "Expected: %s" %
+ ','.join(text),
+ **self.exception_kwargs)
def append_node(self, nodecls, *args, **kwargs):
kwargs.setdefault('source', self.text)
@@ -162,9 +164,9 @@ class Lexer(object):
elif self.control_line and \
not self.control_line[-1].is_ternary(node.keyword):
raise exceptions.SyntaxException(
- "Keyword '%s' not a legal ternary for keyword '%s'" %
- (node.keyword, self.control_line[-1].keyword),
- **self.exception_kwargs)
+ "Keyword '%s' not a legal ternary for keyword '%s'" %
+ (node.keyword, self.control_line[-1].keyword),
+ **self.exception_kwargs)
_coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
@@ -185,10 +187,10 @@ class Lexer(object):
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
if m is not None and m.group(1) != 'utf-8':
raise exceptions.CompileException(
- "Found utf-8 BOM in file, with conflicting "
- "magic encoding comment of '%s'" % m.group(1),
- text.decode('utf-8', 'ignore'),
- 0, 0, filename)
+ "Found utf-8 BOM in file, with conflicting "
+ "magic encoding comment of '%s'" % m.group(1),
+ text.decode('utf-8', 'ignore'),
+ 0, 0, filename)
else:
m = self._coding_re.match(text.decode('utf-8', 'ignore'))
if m:
@@ -201,18 +203,19 @@ class Lexer(object):
text = text.decode(parsed_encoding)
except UnicodeDecodeError:
raise exceptions.CompileException(
- "Unicode decode operation of encoding '%s' failed" %
- parsed_encoding,
- text.decode('utf-8', 'ignore'),
- 0, 0, filename)
+ "Unicode decode operation of encoding '%s' failed" %
+ parsed_encoding,
+ text.decode('utf-8', 'ignore'),
+ 0, 0, filename)
return parsed_encoding, text
def parse(self):
- self.encoding, self.text = self.decode_raw_stream(self.text,
- not self.disable_unicode,
- self.encoding,
- self.filename,)
+ self.encoding, self.text = self.decode_raw_stream(
+ self.text,
+ not self.disable_unicode,
+ self.encoding,
+ self.filename)
for preproc in self.preprocessor:
self.text = preproc(self.text)
@@ -250,15 +253,15 @@ class Lexer(object):
if len(self.tag):
raise exceptions.SyntaxException("Unclosed tag: <%%%s>" %
- self.tag[-1].keyword,
- **self.exception_kwargs)
+ self.tag[-1].keyword,
+ **self.exception_kwargs)
if len(self.control_line):
raise exceptions.SyntaxException(
- "Unterminated control keyword: '%s'" %
- self.control_line[-1].keyword,
- self.text,
- self.control_line[-1].lineno,
- self.control_line[-1].pos, self.filename)
+ "Unterminated control keyword: '%s'" %
+ self.control_line[-1].keyword,
+ self.text,
+ self.control_line[-1].lineno,
+ self.control_line[-1].pos, self.filename)
return self.template
def match_tag_start(self):
@@ -276,7 +279,7 @@ class Lexer(object):
''',
- re.I | re.S | re.X)
+ re.I | re.S | re.X)
if match:
keyword, attr, isend = match.groups()
@@ -284,7 +287,7 @@ class Lexer(object):
attributes = {}
if attr:
for att in re.findall(
- r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
+ r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr):
key, val1, val2 = att
text = val1 or val2
text = text.replace('\r\n', '\n')
@@ -294,12 +297,12 @@ class Lexer(object):
self.tag.pop()
else:
if keyword == 'text':
- match = self.match(r'(.*?)(?=\</%text>)', re.S)
+ match = self.match(r'(.*?)(?=\</%text>)', re.S)
if not match:
raise exceptions.SyntaxException(
- "Unclosed tag: <%%%s>" %
- self.tag[-1].keyword,
- **self.exception_kwargs)
+ "Unclosed tag: <%%%s>" %
+ self.tag[-1].keyword,
+ **self.exception_kwargs)
self.append_node(parsetree.Text, match.group(1))
return self.match_tag_end()
return True
@@ -311,14 +314,14 @@ class Lexer(object):
if match:
if not len(self.tag):
raise exceptions.SyntaxException(
- "Closing tag without opening tag: </%%%s>" %
- match.group(1),
- **self.exception_kwargs)
+ "Closing tag without opening tag: </%%%s>" %
+ match.group(1),
+ **self.exception_kwargs)
elif self.tag[-1].keyword != match.group(1):
raise exceptions.SyntaxException(
- "Closing tag </%%%s> does not match tag: <%%%s>" %
- (match.group(1), self.tag[-1].keyword),
- **self.exception_kwargs)
+ "Closing tag </%%%s> does not match tag: <%%%s>" %
+ (match.group(1), self.tag[-1].keyword),
+ **self.exception_kwargs)
self.tag.pop()
return True
else:
@@ -370,9 +373,9 @@ class Lexer(object):
# compiler.parse() not complain about indentation
text = adjust_whitespace(text) + "\n"
self.append_node(
- parsetree.Code,
- text,
- match.group(1) == '!', lineno=line, pos=pos)
+ parsetree.Code,
+ text,
+ match.group(1) == '!', lineno=line, pos=pos)
return True
else:
return False
@@ -388,17 +391,17 @@ class Lexer(object):
escapes = ""
text = text.replace('\r\n', '\n')
self.append_node(
- parsetree.Expression,
- text, escapes.strip(),
- lineno=line, pos=pos)
+ parsetree.Expression,
+ text, escapes.strip(),
+ lineno=line, pos=pos)
return True
else:
return False
def match_control_line(self):
match = self.match(
- r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
- r"(?:\r?\n|\Z)", re.M)
+ r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\r?\n)|[^\r\n])*)"
+ r"(?:\r?\n|\Z)", re.M)
if match:
operator = match.group(1)
text = match.group(2)
@@ -406,23 +409,23 @@ class Lexer(object):
m2 = re.match(r'(end)?(\w+)\s*(.*)', text)
if not m2:
raise exceptions.SyntaxException(
- "Invalid control line: '%s'" %
- text,
- **self.exception_kwargs)
+ "Invalid control line: '%s'" %
+ text,
+ **self.exception_kwargs)
isend, keyword = m2.group(1, 2)
isend = (isend is not None)
if isend:
if not len(self.control_line):
raise exceptions.SyntaxException(
- "No starting keyword '%s' for '%s'" %
- (keyword, text),
- **self.exception_kwargs)
+ "No starting keyword '%s' for '%s'" %
+ (keyword, text),
+ **self.exception_kwargs)
elif self.control_line[-1].keyword != keyword:
raise exceptions.SyntaxException(
- "Keyword '%s' doesn't match keyword '%s'" %
- (text, self.control_line[-1].keyword),
- **self.exception_kwargs)
+ "Keyword '%s' doesn't match keyword '%s'" %
+ (text, self.control_line[-1].keyword),
+ **self.exception_kwargs)
self.append_node(parsetree.ControlLine, keyword, isend, text)
else:
self.append_node(parsetree.Comment, text)
@@ -438,4 +441,3 @@ class Lexer(object):
return True
else:
return False
-
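For context (not part of the commit): a minimal sketch of the two code paths whose error handling is reindented above, the PEP 263 magic encoding-comment check and the Lexer(...).parse() entry point. The template text and the example.mako filename are illustrative only, and the sketch assumes Mako is importable.

import re

# Same pattern as Lexer._coding_re in the hunk above; group(1) captures the
# declared encoding from a magic coding comment.
_coding_re = re.compile(r'#.*coding[:=]\s*([-\w.]+).*\r?\n')
m = _coding_re.match("# -*- coding: utf-8 -*-\n")
print(m.group(1))  # 'utf-8'

# parse() returns the parsetree.TemplateNode created in __init__; the
# reindented SyntaxException calls above are raised from inside this call.
from mako.lexer import Lexer
node = Lexer("hello, ${name}!", filename="example.mako").parse()
print(type(node))  # <class 'mako.parsetree.TemplateNode'>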