revset: add support for prefix and suffix versions of : and ::
Matt Mackall
r11278:7df88cdf default
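
This changeset extends the revset grammar so the range operators can be used as prefixes and suffixes as well as infixes: ':x', 'x:', '::x' and 'x::' become valid alongside 'x:y' and 'x::y'. The sketch below is not part of the patch; it lists the parse trees the patched grammar should produce, inferred from the new elements-table entries and the one-operand signatures of the new set functions (assuming numeric revisions tokenize as symbols).

# Illustrative only -- expected parse trees for the new operator forms.
#   ':5'   -> ('rangepre',     ('symbol', '5'))   # revs 0 through 5
#   '5:'   -> ('rangepost',    ('symbol', '5'))   # revs 5 through tip
#   '::5'  -> ('dagrangepre',  ('symbol', '5'))   # ancestors of 5
#   '5::'  -> ('dagrangepost', ('symbol', '5'))   # descendants of 5
#   '2:5'  -> ('range', ('symbol', '2'), ('symbol', '5'))  # unchanged infix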
@@ -23,7 +23,10 @@ class parser(object):
     def _advance(self):
         'advance the tokenizer'
         t = self.current
-        self.current = self._iter.next()
+        try:
+            self.current = self._iter.next()
+        except StopIteration:
+            pass
         return t
     def _match(self, m):
         'make sure the tokenizer matches an end condition'
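
The hunk above makes _advance tolerant of running off the end of the token stream: once the tokenizer is exhausted, self.current stays parked on the final token instead of raising StopIteration, so the suffix lookahead added in the next hunk can always inspect the current token. A minimal standalone sketch of the same pattern (toy names, not Mercurial code):

def make_advance(tokens):
    # advance() never raises once the stream is exhausted; the shared
    # state keeps pointing at the final token, as in the patch.
    it = iter(tokens)
    state = {'current': next(it)}
    def advance():
        t = state['current']
        try:
            state['current'] = next(it)
        except StopIteration:
            pass
        return t
    return advance, state

advance, state = make_advance([('symbol', '5'), (':', None), ('end', None)])
assert advance() == ('symbol', '5')
assert advance() == (':', None)
assert advance() == ('end', None)          # no StopIteration here
assert state['current'] == ('end', None)   # lookahead still safe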
@@ -49,17 +52,23 @@ class parser(object):
         # gather tokens until we meet a lower binding strength
         while bind < self._elements[self.current[0]][0]:
             token, value = self._advance()
-            # handle infix rules
-            infix = self._elements[token][2]
-            if len(infix) == 3 and infix[2] == self.current[0]:
-                self._match(infix[2])
-                expr = (infix[0], expr, (None))
+            e = self._elements[token]
+            # check for suffix - next token isn't a valid prefix
+            if len(e) == 4 and not self._elements[self.current[0]][1]:
+                suffix = e[3]
+                expr = (suffix[0], expr)
             else:
-                if not infix[0]:
-                    raise SyntaxError("not an infix")
-                expr = (infix[0], expr, self._parse(infix[1]))
-                if len(infix) == 3:
+                # handle infix rules
+                infix = self._elements[token][2]
+                if len(infix) == 3 and infix[2] == self.current[0]:
                     self._match(infix[2])
+                    expr = (infix[0], expr, (None))
+                else:
+                    if not infix[0]:
+                        raise SyntaxError("not an infix")
+                    expr = (infix[0], expr, self._parse(infix[1]))
+                    if len(infix) == 3:
+                        self._match(infix[2])
         return expr
     def parse(self, message):
         'generate a parse tree from a message'
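
The loop now consults an optional fourth slot in the operator's element tuple: if it exists and the following token has no prefix rule (so it cannot begin an expression), the operator is taken as a suffix; otherwise the old infix path runs. A small sketch of that dispatch, reusing the ':' entry from the elements table below with simplified (assumed, not verbatim) entries for 'symbol' and 'end':

elements = {
    ":":      (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
    "symbol": (0, ("symbol",), None),   # assumed: a symbol can start an expression
    "end":    (0, None, None),          # assumed: end-of-input cannot
}

def role_of(token, next_token):
    e = elements[token]
    if len(e) == 4 and not elements[next_token][1]:
        return "suffix"
    return "infix"

assert role_of(":", "symbol") == "infix"   # '5:9' -> ('range', ...)
assert role_of(":", "end") == "suffix"     # '5:'  -> ('rangepost', ...)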
@@ -12,8 +12,11 @@ import match as _match
 elements = {
     "(": (20, ("group", 1, ")"), ("func", 1, ")")),
     "-": (19, ("negate", 19), ("minus", 19)),
-    "..": (17, None, ("dagrange", 17)),
-    ":": (15, None, ("range", 15)),
+    "::": (17, ("dagrangepre", 17), ("dagrange", 17),
+           ("dagrangepost", 17)),
+    "..": (17, ("dagrangepre", 17), ("dagrange", 17),
+           ("dagrangepost", 17)),
+    ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
     "not": (10, ("not", 10)),
     "!": (10, ("not", 10)),
     "and": (5, None, ("and", 5)),
@@ -36,11 +39,14 @@ def tokenize(program):
         c = program[pos]
         if c.isspace(): # skip inter-token whitespace
             pass
-        elif c in "():,-|&+!": # handle simple operators
-            yield (c, None)
+        elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
+            yield ('::', None)
+            pos += 1 # skip ahead
         elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
             yield ('..', None)
             pos += 1 # skip ahead
+        elif c in "():,-|&+!": # handle simple operators
+            yield (c, None)
         elif c in '"\'': # handle quoted strings
             pos += 1
             s = pos
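
In the tokenizer, the new two-character lookahead for '::' must run before the generic single-character operator branch; otherwise ':' would match first and '::' would be emitted as two ':' tokens. A much-reduced sketch (assumed; the real tokenize also handles '..', quoted strings, richer symbols and error reporting) showing the ordering at work:

def toy_tokenize(program):
    pos, l = 0, len(program)
    while pos < l:
        c = program[pos]
        if c.isspace():
            pass
        elif c == ':' and program[pos:pos + 2] == '::':  # before the ':' case
            yield ('::', None)
            pos += 1
        elif c in "():,-|&+!":
            yield (c, None)
        elif c.isalnum():
            s = pos
            while pos + 1 < l and program[pos + 1].isalnum():
                pos += 1
            yield ('symbol', program[s:pos + 1])
        else:
            raise SyntaxError("syntax error at %d" % pos)
        pos += 1
    yield ('end', None)

assert list(toy_tokenize("5::9")) == [
    ('symbol', '5'), ('::', None), ('symbol', '9'), ('end', None)]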
@@ -126,6 +132,12 @@ def rangeset(repo, subset, x, y):
         return range(m, n + 1)
     return range(m, n - 1, -1)
 
+def rangepreset(repo, subset, x):
+    return range(0, getset(repo, subset, x)[-1] + 1)
+
+def rangepostset(repo, subset, x):
+    return range(getset(repo, subset, x)[0], len(repo))
+
 def dagrangeset(repo, subset, x, y):
     return andset(repo, subset,
                   ('func', ('symbol', 'descendants'), x),
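
The two new set functions give the open-ended range forms their meaning: ':x' covers revision 0 through the last revision produced for x, and 'x:' covers the first revision produced for x through the end of the repository (len(repo) being the revision count). A worked example under assumed values, not real repo objects:

resolved = [4, 7]   # pretend getset(repo, subset, x) returned these revs
num_revs = 10       # pretend len(repo) == 10, i.e. revisions 0..9

# ':x' -- rangepreset: 0 .. resolved[-1]
assert list(range(0, resolved[-1] + 1)) == [0, 1, 2, 3, 4, 5, 6, 7]
# 'x:' -- rangepostset: resolved[0] .. tip
assert list(range(resolved[0], num_revs)) == [4, 5, 6, 7, 8, 9]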
@@ -469,7 +481,11 @@ methods = {
     "negate": negate,
     "minus": minusset,
     "range": rangeset,
+    "rangepre": rangepreset,
+    "rangepost": rangepostset,
     "dagrange": dagrangeset,
+    "dagrangepre": ancestors,
+    "dagrangepost": descendants,
     "string": stringset,
     "symbol": symbolset,
     "and": andset,
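
The method table ties it together: the DAG prefix and suffix forms reuse the existing ancestors and descendants predicates, so '::x' should behave like 'ancestors(x)' and 'x::' like 'descendants(x)', while ':x' and 'x:' route to the new range helpers above. A sketch (assumed helper, not from the patch) of how a parse tree dispatches through this table:

def evaluate(methods, repo, subset, tree):
    # Look up the node name and pass its operand subtrees straight through,
    # mirroring how getset-style evaluation would consult the table.
    return methods[tree[0]](repo, subset, *tree[1:])

# ('dagrangepre', x)  -> ancestors(repo, subset, x)     i.e. '::x'
# ('dagrangepost', x) -> descendants(repo, subset, x)   i.e. 'x::'
# ('rangepre', x)     -> rangepreset(repo, subset, x)   i.e. ':x'
# ('rangepost', x)    -> rangepostset(repo, subset, x)  i.e. 'x:'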