Added extension ordering; this fixes #376.
--HG--
branch : trunk
diff --git a/jinja2/environment.py b/jinja2/environment.py
index 25cec88..529c14c 100644
--- a/jinja2/environment.py
+++ b/jinja2/environment.py
@@ -334,6 +334,11 @@
 
     lexer = property(get_lexer, doc="The lexer for this environment.")
 
+    def iter_extensions(self):
+        """Iterates over the extensions by priority."""
+        return iter(sorted(self.extensions.values(),
+                           key=lambda x: x.priority))
+
     def getitem(self, obj, argument):
         """Get an item or attribute of an object but prefer the item."""
         try:
@@ -407,7 +412,7 @@
         because there you usually only want the actual source tokenized.
         """
         return reduce(lambda s, e: e.preprocess(s, name, filename),
-                      self.extensions.itervalues(), unicode(source))
+                      self.iter_extensions(), unicode(source))
 
     def _tokenize(self, source, name, filename=None, state=None):
         """Called by the parser to do the preprocessing and filtering
@@ -415,7 +420,7 @@
"""
source = self.preprocess(source, name, filename)
stream = self.lexer.tokenize(source, name, filename, state)
- for ext in self.extensions.itervalues():
+ for ext in self.iter_extensions():
stream = ext.filter_stream(stream)
if not isinstance(stream, TokenStream):
stream = TokenStream(stream, name, filename)
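
For illustration, a minimal sketch of the behavior this commit introduces (not part of the diff; FirstPass and SecondPass are hypothetical extensions, and the priority values assume the era's default of 100 on jinja2.ext.Extension): since iter_extensions() sorts ascending by priority, an extension with a lower priority value has its preprocess hook applied first, regardless of registration order.

    from jinja2 import Environment
    from jinja2.ext import Extension


    class FirstPass(Extension):
        priority = 100  # lower value sorts first in iter_extensions()

        def preprocess(self, source, name, filename=None):
            return source.replace('A', 'B')


    class SecondPass(Extension):
        priority = 200  # sorts after FirstPass, so it runs second

        def preprocess(self, source, name, filename=None):
            return source.replace('B', 'C')


    # Registration order no longer matters; the extensions run by priority.
    env = Environment(extensions=[SecondPass, FirstPass])
    print(env.preprocess('A'))  # 'A' -> 'B' (FirstPass) -> 'C' (SecondPass)

Before this change the run order depended on dict iteration order of self.extensions, which is undefined; sorting by the priority attribute makes it deterministic.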