@@ -80,8 +80,8 @@ def tokenize(program):
     yield ('end', None, pos)

 def parse(expr):
-    p = parser.parser(tokenize, elements)
-    tree, pos = p.parse(expr)
+    p = parser.parser(elements)
+    tree, pos = p.parse(tokenize(expr))
     if pos != len(expr):
         raise error.ParseError(_("invalid token"), pos)
     return tree
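With this change `p.parse()` consumes a ready-made token iterator instead of a raw program string, so a tokenizer only needs to yield `(type, value, pos)` tuples and finish with an `('end', None, pos)` marker, as `tokenize()` above already does. A minimal illustrative generator with that shape (the token types and the name `exampletokenize` are not part of this patch):

```python
def exampletokenize(program):
    # illustrative only: produce (type, value, pos) tuples and end with
    # the ('end', None, pos) marker that parser.parse() expects
    pos = 0
    while pos < len(program):
        c = program[pos]
        if c.isspace():
            pass                       # skip whitespace
        elif c.isdigit():
            yield ('number', c, pos)   # single-character tokens only
        else:
            yield ('symbol', c, pos)
        pos += 1
    yield ('end', None, pos)
```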
@@ -19,8 +19,7 @@ import error
 from i18n import _

 class parser(object):
-    def __init__(self, tokenizer, elements, methods=None):
-        self._tokenizer = tokenizer
+    def __init__(self, elements, methods=None):
         self._elements = elements
         self._methods = methods
         self.current = None
@@ -72,12 +71,9 @@ class parser(object):
                 if len(infix) == 3:
                     self._match(infix[2], pos)
         return expr
-    def parse(self, message, lookup=None):
-        'generate a parse tree from a message'
-        if lookup:
-            self._iter = self._tokenizer(message, lookup)
-        else:
-            self._iter = self._tokenizer(message)
+    def parse(self, tokeniter):
+        'generate a parse tree from tokens'
+        self._iter = tokeniter
         self._advance()
         res = self._parse()
         token, value, pos = self.current
@@ -87,9 +83,9 @@ class parser(object):
         if not isinstance(tree, tuple):
             return tree
         return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]])
-    def __call__(self, message, lookup=None):
-        'parse a message into a parse tree and evaluate if methods given'
-        t = self.parse(message, lookup=lookup)
+    def __call__(self, tokeniter):
+        'parse tokens into a parse tree and evaluate if methods given'
+        t = self.parse(tokeniter)
         if self._methods:
             return self.eval(t)
         return t
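Taken together, the parser class no longer owns a tokenizer: the constructor takes only the grammar table (plus optional methods), and both `parse()` and `__call__()` receive an already-built token iterator. A hedged usage sketch, borrowing `fileset`'s table and tokenizer purely for illustration and assuming a Mercurial tree that already contains this change:

```python
from mercurial import fileset, parser

# the parser is built from a grammar table alone; no tokenizer is bound to it
p = parser.parser(fileset.elements)

# the caller tokenizes first and hands the iterator to parse()
tree, pos = p.parse(fileset.tokenize("a.py or b.txt"))
```

If a `methods` table were also passed to the constructor, calling `p(fileset.tokenize(...))` would evaluate the resulting tree as before; only where the tokens come from has changed.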
@@ -2387,9 +2387,9 @@ def _parsealiasdecl(decl):
     >>> _parsealiasdecl('foo($1, $2, $1)')
     ('foo', None, None, 'argument names collide with each other')
     """
-    p = parser.parser(_tokenizealias, elements)
+    p = parser.parser(elements)
     try:
-        tree, pos = p.parse(decl)
+        tree, pos = p.parse(_tokenizealias(decl))
         if (pos != len(decl)):
             raise error.ParseError(_('invalid token'), pos)

@@ -2478,8 +2478,8 @@ def _parsealiasdefn(defn, args):
                                            pos)
             yield (t, value, pos)

-    p = parser.parser(tokenizedefn, elements)
-    tree, pos = p.parse(defn)
+    p = parser.parser(elements)
+    tree, pos = p.parse(tokenizedefn(defn))
     if pos != len(defn):
         raise error.ParseError(_('invalid token'), pos)
     return parser.simplifyinfixops(tree, ('or',))
@@ -2609,8 +2609,8 @@ def foldconcat(tree):
     return tuple(foldconcat(t) for t in tree)

 def parse(spec, lookup=None):
-    p = parser.parser(tokenize, elements)
-    tree, pos = p.parse(spec, lookup=lookup)
+    p = parser.parser(elements)
+    tree, pos = p.parse(tokenize(spec, lookup=lookup))
     if pos != len(spec):
         raise error.ParseError(_("invalid token"), pos)
     return parser.simplifyinfixops(tree, ('or',))
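In revset's top-level `parse()` the optional `lookup` callable follows the same movement: instead of being relayed through `p.parse()`, it now goes straight to `tokenize()`, which already accepts it. Since the `def parse(spec, lookup=None):` signature is untouched, external callers are unaffected; a hedged sketch of unchanged caller code against a tree containing this patch:

```python
# hedged sketch: the public entry point keeps its signature, so callers
# see no difference from the internal move of `lookup` into tokenize()
from mercurial import revset

tree = revset.parse('tip or .')   # lookup defaults to None, as before
```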
@@ -27,8 +27,7 @@ elements = {
     "end": (0, None, None),
 }

-def tokenizer(data):
-    program, start, end = data
+def tokenize(program, start, end):
     pos = start
     while pos < end:
         c = program[pos]
@@ -96,7 +95,7 @@ def tokenizer(data):
 def compiletemplate(tmpl, context):
     parsed = []
     pos, stop = 0, len(tmpl)
-    p = parser.parser(tokenizer, elements)
+    p = parser.parser(elements)
     while pos < stop:
         n = tmpl.find('{', pos)
         if n < 0:
@@ -111,8 +110,7 @@ def compiletemplate(tmpl, context):
         if n > pos:
             parsed.append(('string', tmpl[pos:n]))

-        pd = [tmpl, n + 1, stop]
-        parseres, pos = p.parse(pd)
+        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop))
         parsed.append(parseres)

     return [compileexp(e, context, methods) for e in parsed]
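For the templater the same move removes the packed-argument indirection: `tokenize()` now takes the template string and the slice bounds directly, so `compiletemplate()` no longer builds the intermediate `pd` list just to smuggle those bounds through `p.parse()`. A short sketch reusing the names from the hunk above (not standalone code):

```python
# scan only tmpl[n + 1:stop], i.e. the expression after the '{' at index n;
# the yielded positions index into the full template string, so the
# returned pos can keep driving the surrounding while loop unchanged
parseres, pos = p.parse(tokenize(tmpl, n + 1, stop))
parsed.append(parseres)
```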