##// END OF EJS Templates
parser: accept iterator of tokens instead of tokenizer function and program...
Yuya Nishihara
r25654:af329a84 default
parent child Browse files
Show More
@@ -80,8 +80,8 b' def tokenize(program):'
80 yield ('end', None, pos)
80 yield ('end', None, pos)
81
81
82 def parse(expr):
82 def parse(expr):
83 p = parser.parser(tokenize, elements)
83 p = parser.parser(elements)
84 tree, pos = p.parse(expr)
84 tree, pos = p.parse(tokenize(expr))
85 if pos != len(expr):
85 if pos != len(expr):
86 raise error.ParseError(_("invalid token"), pos)
86 raise error.ParseError(_("invalid token"), pos)
87 return tree
87 return tree
@@ -19,8 +19,7 b' import error'
19 from i18n import _
19 from i18n import _
20
20
21 class parser(object):
21 class parser(object):
22 def __init__(self, tokenizer, elements, methods=None):
22 def __init__(self, elements, methods=None):
23 self._tokenizer = tokenizer
24 self._elements = elements
23 self._elements = elements
25 self._methods = methods
24 self._methods = methods
26 self.current = None
25 self.current = None
@@ -72,12 +71,9 b' class parser(object):'
72 if len(infix) == 3:
71 if len(infix) == 3:
73 self._match(infix[2], pos)
72 self._match(infix[2], pos)
74 return expr
73 return expr
75 def parse(self, message, lookup=None):
74 def parse(self, tokeniter):
76 'generate a parse tree from a message'
75 'generate a parse tree from tokens'
77 if lookup:
76 self._iter = tokeniter
78 self._iter = self._tokenizer(message, lookup)
79 else:
80 self._iter = self._tokenizer(message)
81 self._advance()
77 self._advance()
82 res = self._parse()
78 res = self._parse()
83 token, value, pos = self.current
79 token, value, pos = self.current
@@ -87,9 +83,9 b' class parser(object):'
87 if not isinstance(tree, tuple):
83 if not isinstance(tree, tuple):
88 return tree
84 return tree
89 return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]])
85 return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]])
90 def __call__(self, message):
86 def __call__(self, tokeniter):
91 'parse a message into a parse tree and evaluate if methods given'
87 'parse tokens into a parse tree and evaluate if methods given'
92 t = self.parse(message)
88 t = self.parse(tokeniter)
93 if self._methods:
89 if self._methods:
94 return self.eval(t)
90 return self.eval(t)
95 return t
91 return t
@@ -2387,9 +2387,9 b' def _parsealiasdecl(decl):'
2387 >>> _parsealiasdecl('foo($1, $2, $1)')
2387 >>> _parsealiasdecl('foo($1, $2, $1)')
2388 ('foo', None, None, 'argument names collide with each other')
2388 ('foo', None, None, 'argument names collide with each other')
2389 """
2389 """
2390 p = parser.parser(_tokenizealias, elements)
2390 p = parser.parser(elements)
2391 try:
2391 try:
2392 tree, pos = p.parse(decl)
2392 tree, pos = p.parse(_tokenizealias(decl))
2393 if (pos != len(decl)):
2393 if (pos != len(decl)):
2394 raise error.ParseError(_('invalid token'), pos)
2394 raise error.ParseError(_('invalid token'), pos)
2395
2395
@@ -2478,8 +2478,8 b' def _parsealiasdefn(defn, args):'
2478 pos)
2478 pos)
2479 yield (t, value, pos)
2479 yield (t, value, pos)
2480
2480
2481 p = parser.parser(tokenizedefn, elements)
2481 p = parser.parser(elements)
2482 tree, pos = p.parse(defn)
2482 tree, pos = p.parse(tokenizedefn(defn))
2483 if pos != len(defn):
2483 if pos != len(defn):
2484 raise error.ParseError(_('invalid token'), pos)
2484 raise error.ParseError(_('invalid token'), pos)
2485 return parser.simplifyinfixops(tree, ('or',))
2485 return parser.simplifyinfixops(tree, ('or',))
@@ -2609,8 +2609,8 b' def foldconcat(tree):'
2609 return tuple(foldconcat(t) for t in tree)
2609 return tuple(foldconcat(t) for t in tree)
2610
2610
2611 def parse(spec, lookup=None):
2611 def parse(spec, lookup=None):
2612 p = parser.parser(tokenize, elements)
2612 p = parser.parser(elements)
2613 tree, pos = p.parse(spec, lookup=lookup)
2613 tree, pos = p.parse(tokenize(spec, lookup=lookup))
2614 if pos != len(spec):
2614 if pos != len(spec):
2615 raise error.ParseError(_("invalid token"), pos)
2615 raise error.ParseError(_("invalid token"), pos)
2616 return parser.simplifyinfixops(tree, ('or',))
2616 return parser.simplifyinfixops(tree, ('or',))
@@ -27,8 +27,7 b' elements = {'
27 "end": (0, None, None),
27 "end": (0, None, None),
28 }
28 }
29
29
30 def tokenizer(data):
30 def tokenize(program, start, end):
31 program, start, end = data
32 pos = start
31 pos = start
33 while pos < end:
32 while pos < end:
34 c = program[pos]
33 c = program[pos]
@@ -96,7 +95,7 b' def tokenizer(data):'
96 def compiletemplate(tmpl, context):
95 def compiletemplate(tmpl, context):
97 parsed = []
96 parsed = []
98 pos, stop = 0, len(tmpl)
97 pos, stop = 0, len(tmpl)
99 p = parser.parser(tokenizer, elements)
98 p = parser.parser(elements)
100 while pos < stop:
99 while pos < stop:
101 n = tmpl.find('{', pos)
100 n = tmpl.find('{', pos)
102 if n < 0:
101 if n < 0:
@@ -111,8 +110,7 b' def compiletemplate(tmpl, context):'
111 if n > pos:
110 if n > pos:
112 parsed.append(('string', tmpl[pos:n]))
111 parsed.append(('string', tmpl[pos:n]))
113
112
114 pd = [tmpl, n + 1, stop]
113 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop))
115 parseres, pos = p.parse(pd)
116 parsed.append(parseres)
114 parsed.append(parseres)
117
115
118 return [compileexp(e, context, methods) for e in parsed]
116 return [compileexp(e, context, methods) for e in parsed]
General Comments 0
You need to be logged in to leave comments. Login now