graphlog: pass changesets to revset.match() in changelog order...
Patrick Mezard
r16406:4aa4f50c default
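The change below in getlogrevs() stops feeding the caller-ordered revision list straight to the compiled revset matcher. Instead, the matcher is evaluated over the revisions sorted into ascending changelog order, which revset predicates handle most efficiently, and the matched set is then used to filter the original list so the ordering requested by the caller is preserved. A minimal sketch of that pattern follows; filter_preserving_order and the is_selected predicate are illustrative stand-ins, not part of the patch or of Mercurial's API.

# Sketch of the "evaluate in changelog order, keep the caller's order" pattern.
def filter_preserving_order(revs, is_selected):
    # revs: revisions in the caller's (possibly reversed) order.
    # is_selected: stand-in for the compiled revset matcher; assumed to be
    # cheaper to evaluate when revisions are visited in ascending order.
    matched = set(is_selected(sorted(revs)))    # evaluate in changelog order
    return [r for r in revs if r in matched]    # preserve the caller's order

# Toy usage with a predicate selecting even revision numbers:
revs = [9, 7, 5, 3, 1, 0, 2, 4]
even = lambda seq: [r for r in seq if r % 2 == 0]
print(filter_preserving_order(revs, even))      # prints [0, 2, 4]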
@@ -1,535 +1,539 b''
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.commands import templateopts
17 17 from mercurial.i18n import _
18 18 from mercurial.node import nullrev
19 19 from mercurial import cmdutil, commands, extensions, scmutil
20 20 from mercurial import hg, util, graphmod, templatekw
21 21 from mercurial import revset as revsetmod
22 22
23 23 cmdtable = {}
24 24 command = cmdutil.command(cmdtable)
25 25
26 26 ASCIIDATA = 'ASC'
27 27
28 28 def asciiedges(type, char, lines, seen, rev, parents):
29 29 """adds edge info to changelog DAG walk suitable for ascii()"""
30 30 if rev not in seen:
31 31 seen.append(rev)
32 32 nodeidx = seen.index(rev)
33 33
34 34 knownparents = []
35 35 newparents = []
36 36 for parent in parents:
37 37 if parent in seen:
38 38 knownparents.append(parent)
39 39 else:
40 40 newparents.append(parent)
41 41
42 42 ncols = len(seen)
43 43 nextseen = seen[:]
44 44 nextseen[nodeidx:nodeidx + 1] = newparents
45 45 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
46 46
47 47 while len(newparents) > 2:
48 48 # ascii() only knows how to add or remove a single column between two
49 49 # calls. Nodes with more than two parents break this constraint so we
50 50 # introduce intermediate expansion lines to grow the active node list
51 51 # slowly.
52 52 edges.append((nodeidx, nodeidx))
53 53 edges.append((nodeidx, nodeidx + 1))
54 54 nmorecols = 1
55 55 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
56 56 char = '\\'
57 57 lines = []
58 58 nodeidx += 1
59 59 ncols += 1
60 60 edges = []
61 61 del newparents[0]
62 62
63 63 if len(newparents) > 0:
64 64 edges.append((nodeidx, nodeidx))
65 65 if len(newparents) > 1:
66 66 edges.append((nodeidx, nodeidx + 1))
67 67 nmorecols = len(nextseen) - ncols
68 68 seen[:] = nextseen
69 69 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
70 70
71 71 def fix_long_right_edges(edges):
72 72 for (i, (start, end)) in enumerate(edges):
73 73 if end > start:
74 74 edges[i] = (start, end + 1)
75 75
76 76 def get_nodeline_edges_tail(
77 77 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
78 78 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
79 79 # Still going in the same non-vertical direction.
80 80 if n_columns_diff == -1:
81 81 start = max(node_index + 1, p_node_index)
82 82 tail = ["|", " "] * (start - node_index - 1)
83 83 tail.extend(["/", " "] * (n_columns - start))
84 84 return tail
85 85 else:
86 86 return ["\\", " "] * (n_columns - node_index - 1)
87 87 else:
88 88 return ["|", " "] * (n_columns - node_index - 1)
89 89
90 90 def draw_edges(edges, nodeline, interline):
91 91 for (start, end) in edges:
92 92 if start == end + 1:
93 93 interline[2 * end + 1] = "/"
94 94 elif start == end - 1:
95 95 interline[2 * start + 1] = "\\"
96 96 elif start == end:
97 97 interline[2 * start] = "|"
98 98 else:
99 99 if 2 * end >= len(nodeline):
100 100 continue
101 101 nodeline[2 * end] = "+"
102 102 if start > end:
103 103 (start, end) = (end, start)
104 104 for i in range(2 * start + 1, 2 * end):
105 105 if nodeline[i] != "+":
106 106 nodeline[i] = "-"
107 107
108 108 def get_padding_line(ni, n_columns, edges):
109 109 line = []
110 110 line.extend(["|", " "] * ni)
111 111 if (ni, ni - 1) in edges or (ni, ni) in edges:
112 112 # (ni, ni - 1)      (ni, ni)
113 113 # | | | |           | | | |
114 114 # +---o |           | o---+
115 115 # | | c |           | c | |
116 116 # | |/ /            | |/ /
117 117 # | | |             | | |
118 118 c = "|"
119 119 else:
120 120 c = " "
121 121 line.extend([c, " "])
122 122 line.extend(["|", " "] * (n_columns - ni - 1))
123 123 return line
124 124
125 125 def asciistate():
126 126 """returns the initial value for the "state" argument to ascii()"""
127 127 return [0, 0]
128 128
129 129 def ascii(ui, state, type, char, text, coldata):
130 130 """prints an ASCII graph of the DAG
131 131
132 132 takes the following arguments (one call per node in the graph):
133 133
134 134 - ui to write to
135 135 - Somewhere to keep the needed state in (init to asciistate())
136 136 - Column of the current node in the set of ongoing edges.
137 137 - Type indicator of node data == ASCIIDATA.
138 138 - Payload: (char, lines):
139 139 - Character to use as node's symbol.
140 140 - List of lines to display as the node's text.
141 141 - Edges; a list of (col, next_col) indicating the edges between
142 142 the current node and its parents.
143 143 - Number of columns (ongoing edges) in the current revision.
144 144 - The difference between the number of columns (ongoing edges)
145 145 in the next revision and the number of columns (ongoing edges)
146 146 in the current revision. That is: -1 means one column removed;
147 147 0 means no columns added or removed; 1 means one column added.
148 148 """
149 149
150 150 idx, edges, ncols, coldiff = coldata
151 151 assert -2 < coldiff < 2
152 152 if coldiff == -1:
153 153 # Transform
154 154 #
155 155 #     | | |        | | |
156 156 #     o | |  into  o---+
157 157 #     |X /         |/ /
158 158 #     | |          | |
159 159 fix_long_right_edges(edges)
160 160
161 161 # add_padding_line says whether to rewrite
162 162 #
163 163 #     | | | |        | | | |
164 164 #     | o---+   into | o---+
165 165 #     |  / /         |   | |  # <--- padding line
166 166 #     o | |          |  / /
167 167 #                    o | |
168 168 add_padding_line = (len(text) > 2 and coldiff == -1 and
169 169 [x for (x, y) in edges if x + 1 < y])
170 170
171 171 # fix_nodeline_tail says whether to rewrite
172 172 #
173 173 #     | | o | |        | | o | |
174 174 #     | | |/ /         | | |/ /
175 175 #     | o | |    into  | o / /   # <--- fixed nodeline tail
176 176 #     | |/ /           | |/ /
177 177 #     o | |            o | |
178 178 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
179 179
180 180 # nodeline is the line containing the node character (typically o)
181 181 nodeline = ["|", " "] * idx
182 182 nodeline.extend([char, " "])
183 183
184 184 nodeline.extend(
185 185 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
186 186 state[0], fix_nodeline_tail))
187 187
188 188 # shift_interline is the line containing the non-vertical
189 189 # edges between this entry and the next
190 190 shift_interline = ["|", " "] * idx
191 191 if coldiff == -1:
192 192 n_spaces = 1
193 193 edge_ch = "/"
194 194 elif coldiff == 0:
195 195 n_spaces = 2
196 196 edge_ch = "|"
197 197 else:
198 198 n_spaces = 3
199 199 edge_ch = "\\"
200 200 shift_interline.extend(n_spaces * [" "])
201 201 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
202 202
203 203 # draw edges from the current node to its parents
204 204 draw_edges(edges, nodeline, shift_interline)
205 205
206 206 # lines is the list of all graph lines to print
207 207 lines = [nodeline]
208 208 if add_padding_line:
209 209 lines.append(get_padding_line(idx, ncols, edges))
210 210 lines.append(shift_interline)
211 211
212 212 # make sure that there are as many graph lines as there are
213 213 # log strings
214 214 while len(text) < len(lines):
215 215 text.append("")
216 216 if len(lines) < len(text):
217 217 extra_interline = ["|", " "] * (ncols + coldiff)
218 218 while len(lines) < len(text):
219 219 lines.append(extra_interline)
220 220
221 221 # print lines
222 222 indentation_level = max(ncols, ncols + coldiff)
223 223 for (line, logstr) in zip(lines, text):
224 224 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
225 225 ui.write(ln.rstrip() + '\n')
226 226
227 227 # ... and start over
228 228 state[0] = coldiff
229 229 state[1] = idx
230 230
231 231 def get_revs(repo, rev_opt):
232 232 if rev_opt:
233 233 revs = scmutil.revrange(repo, rev_opt)
234 234 if len(revs) == 0:
235 235 return (nullrev, nullrev)
236 236 return (max(revs), min(revs))
237 237 else:
238 238 return (len(repo) - 1, 0)
239 239
240 240 def check_unsupported_flags(pats, opts):
241 241 for op in ["newest_first"]:
242 242 if op in opts and opts[op]:
243 243 raise util.Abort(_("-G/--graph option is incompatible with --%s")
244 244 % op.replace("_", "-"))
245 245
246 246 def makefilematcher(repo, pats, followfirst):
247 247 # When displaying a revision with --patch --follow FILE, we have
248 248 # to know which file of the revision must be diffed. With
249 249 # --follow, we want the names of the ancestors of FILE in the
250 250 # revision, stored in "fcache". "fcache" is populated by
251 251 # reproducing the graph traversal already done by --follow revset
252 252 # and relating linkrevs to file names (which is not "correct" but
253 253 # good enough).
254 254 fcache = {}
255 255 fcacheready = [False]
256 256 pctx = repo['.']
257 257 wctx = repo[None]
258 258
259 259 def populate():
260 260 for fn in pats:
261 261 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
262 262 for c in i:
263 263 fcache.setdefault(c.linkrev(), set()).add(c.path())
264 264
265 265 def filematcher(rev):
266 266 if not fcacheready[0]:
267 267 # Lazy initialization
268 268 fcacheready[0] = True
269 269 populate()
270 270 return scmutil.match(wctx, fcache.get(rev, []), default='path')
271 271
272 272 return filematcher
273 273
274 274 def _makelogrevset(repo, pats, opts, revs):
275 275 """Return (expr, filematcher) where expr is a revset string built
276 276 from log options and file patterns or None. If --stat or --patch
277 277 are not passed filematcher is None. Otherwise it is a callable
278 278 taking a revision number and returning a match objects filtering
279 279 the files to be detailed when displaying the revision.
280 280 """
281 281 opt2revset = {
282 282 'follow': ('follow()', None),
283 283 'follow_first': ('_followfirst()', None),
284 284 'no_merges': ('not merge()', None),
285 285 'only_merges': ('merge()', None),
286 286 '_matchfiles': ('_matchfiles(%(val)s)', None),
287 287 'date': ('date(%(val)r)', None),
288 288 'branch': ('branch(%(val)r)', ' or '),
289 289 '_patslog': ('filelog(%(val)r)', ' or '),
290 290 '_patsfollow': ('follow(%(val)r)', ' or '),
291 291 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
292 292 'keyword': ('keyword(%(val)r)', ' or '),
293 293 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
294 294 'user': ('user(%(val)r)', ' or '),
295 295 }
296 296
297 297 opts = dict(opts)
298 298 # follow or not follow?
299 299 follow = opts.get('follow') or opts.get('follow_first')
300 300 followfirst = opts.get('follow_first')
301 301 if 'follow' in opts:
302 302 del opts['follow']
303 303 if 'follow_first' in opts:
304 304 del opts['follow_first']
305 305
306 306 # branch and only_branch are really aliases and must be handled at
307 307 # the same time
308 308 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
309 309 # pats/include/exclude are passed to match.match() directly in
310 310 # _matchfiles() revset but walkchangerevs() builds its matcher with
311 311 # scmutil.match(). The difference is input pats are globbed on
312 312 # platforms without shell expansion (windows).
313 313 pctx = repo[None]
314 314 match, pats = scmutil.matchandpats(pctx, pats, opts)
315 315 slowpath = match.anypats() or (match.files() and opts.get('removed'))
316 316 if not slowpath:
317 317 for f in match.files():
318 318 if follow and f not in pctx:
319 319 raise util.Abort(_('cannot follow file not in parent '
320 320 'revision: "%s"') % f)
321 321 filelog = repo.file(f)
322 322 if not len(filelog):
323 323 # A zero count may be a directory or deleted file, so
324 324 # try to find matching entries on the slow path.
325 325 if follow:
326 326 raise util.Abort(
327 327 _('cannot follow nonexistent file: "%s"') % f)
328 328 slowpath = True
329 329 if slowpath:
330 330 # See cmdutil.walkchangerevs() slow path.
331 331 #
332 332 if follow:
333 333 raise util.Abort(_('can only follow copies/renames for explicit '
334 334 'filenames'))
335 335 # pats/include/exclude cannot be represented as separate
336 336 # revset expressions as their filtering logic applies at file
337 337 # level. For instance "-I a -X a" matches a revision touching
338 338 # "a" and "b" while "file(a) and not file(b)" does
339 339 # not. Besides, filesets are evaluated against the working
340 340 # directory.
341 341 matchargs = ['r:']
342 342 for p in pats:
343 343 matchargs.append('p:' + p)
344 344 for p in opts.get('include', []):
345 345 matchargs.append('i:' + p)
346 346 for p in opts.get('exclude', []):
347 347 matchargs.append('x:' + p)
348 348 matchargs = ','.join(('%r' % p) for p in matchargs)
349 349 opts['_matchfiles'] = matchargs
350 350 else:
351 351 if follow:
352 352 if followfirst:
353 353 if pats:
354 354 opts['_patsfollowfirst'] = list(pats)
355 355 else:
356 356 opts['follow_first'] = True
357 357 else:
358 358 if pats:
359 359 opts['_patsfollow'] = list(pats)
360 360 else:
361 361 opts['follow'] = True
362 362 else:
363 363 opts['_patslog'] = list(pats)
364 364
365 365 filematcher = None
366 366 if opts.get('patch') or opts.get('stat'):
367 367 if follow:
368 368 filematcher = makefilematcher(repo, pats, followfirst)
369 369 else:
370 370 filematcher = lambda rev: match
371 371
372 372 revset = []
373 373 for op, val in opts.iteritems():
374 374 if not val:
375 375 continue
376 376 if op not in opt2revset:
377 377 continue
378 378 revop, andor = opt2revset[op]
379 379 if '%(val)' not in revop:
380 380 revset.append(revop)
381 381 else:
382 382 if not isinstance(val, list):
383 383 expr = revop % {'val': val}
384 384 else:
385 385 expr = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
386 386 revset.append(expr)
387 387
388 388 if revset:
389 389 revset = '(' + ' and '.join(revset) + ')'
390 390 else:
391 391 revset = None
392 392 return revset, filematcher
393 393
394 394 def getlogrevs(repo, pats, opts):
395 395 """Return (revs, expr, filematcher) where revs is a list of
396 396 revision numbers, expr is a revset string built from log options
397 397 and file patterns or None, and used to filter 'revs'. If --stat or
399 399 --patch are not passed, filematcher is None. Otherwise it is a
400 400 callable taking a revision number and returning a match object
400 400 filtering the files to be detailed when displaying the revision.
401 401 """
402 402 if not len(repo):
403 403 return [], None, None
404 404 if opts.get('rev'):
405 405 revs = scmutil.revrange(repo, opts['rev'])
406 406 else:
407 407 revs = range(len(repo))
408 408 if not revs:
409 409 return [], None, None
410 410 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
411 411 if expr:
412 revs = revsetmod.match(repo.ui, expr)(repo, revs)
412 # Evaluate revisions in changelog order for performance
413 # reasons but preserve the original sequence order in the
414 # filtered result.
415 matched = set(revsetmod.match(repo.ui, expr)(repo, sorted(revs)))
416 revs = [r for r in revs if r in matched]
413 417 return revs, expr, filematcher
414 418
415 419 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
416 420 filematcher=None):
417 421 seen, state = [], asciistate()
418 422 for rev, type, ctx, parents in dag:
419 423 char = ctx.node() in showparents and '@' or 'o'
420 424 copies = None
421 425 if getrenamed and ctx.rev():
422 426 copies = []
423 427 for fn in ctx.files():
424 428 rename = getrenamed(fn, ctx.rev())
425 429 if rename:
426 430 copies.append((fn, rename[0]))
427 431 revmatchfn = None
428 432 if filematcher is not None:
429 433 revmatchfn = filematcher(ctx.rev())
430 434 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
431 435 lines = displayer.hunk.pop(rev).split('\n')[:-1]
432 436 displayer.flush(rev)
433 437 edges = edgefn(type, char, lines, seen, rev, parents)
434 438 for type, char, lines, coldata in edges:
435 439 ascii(ui, state, type, char, lines, coldata)
436 440 displayer.close()
437 441
438 442 @command('glog',
439 443 [('l', 'limit', '',
440 444 _('limit number of changes displayed'), _('NUM')),
441 445 ('p', 'patch', False, _('show patch')),
442 446 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
443 447 ] + templateopts,
444 448 _('hg glog [OPTION]... [FILE]'))
445 449 def graphlog(ui, repo, *pats, **opts):
446 450 """show revision history alongside an ASCII revision graph
447 451
448 452 Print a revision history alongside a revision graph drawn with
449 453 ASCII characters.
450 454
451 455 Nodes printed as an @ character are parents of the working
452 456 directory.
453 457 """
454 458
455 459 check_unsupported_flags(pats, opts)
456 460
457 461 revs, expr, filematcher = getlogrevs(repo, pats, opts)
458 462 revs = sorted(revs, reverse=1)
459 463 limit = cmdutil.loglimit(opts)
460 464 if limit is not None:
461 465 revs = revs[:limit]
462 466 revdag = graphmod.dagwalker(repo, revs)
463 467
464 468 getrenamed = None
465 469 if opts.get('copies'):
466 470 endrev = None
467 471 if opts.get('rev'):
468 472 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
469 473 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
470 474 displayer = show_changeset(ui, repo, opts, buffered=True)
471 475 showparents = [ctx.node() for ctx in repo[None].parents()]
472 476 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
473 477 filematcher)
474 478
475 479 def graphrevs(repo, nodes, opts):
476 480 limit = cmdutil.loglimit(opts)
477 481 nodes.reverse()
478 482 if limit is not None:
479 483 nodes = nodes[:limit]
480 484 return graphmod.nodes(repo, nodes)
481 485
482 486 def goutgoing(ui, repo, dest=None, **opts):
483 487 """show the outgoing changesets alongside an ASCII revision graph
484 488
485 489 Print the outgoing changesets alongside a revision graph drawn with
486 490 ASCII characters.
487 491
488 492 Nodes printed as an @ character are parents of the working
489 493 directory.
490 494 """
491 495
492 496 check_unsupported_flags([], opts)
493 497 o = hg._outgoing(ui, repo, dest, opts)
494 498 if o is None:
495 499 return
496 500
497 501 revdag = graphrevs(repo, o, opts)
498 502 displayer = show_changeset(ui, repo, opts, buffered=True)
499 503 showparents = [ctx.node() for ctx in repo[None].parents()]
500 504 generate(ui, revdag, displayer, showparents, asciiedges)
501 505
502 506 def gincoming(ui, repo, source="default", **opts):
503 507 """show the incoming changesets alongside an ASCII revision graph
504 508
505 509 Print the incoming changesets alongside a revision graph drawn with
506 510 ASCII characters.
507 511
508 512 Nodes printed as an @ character are parents of the working
509 513 directory.
510 514 """
511 515 def subreporecurse():
512 516 return 1
513 517
514 518 check_unsupported_flags([], opts)
515 519 def display(other, chlist, displayer):
516 520 revdag = graphrevs(other, chlist, opts)
517 521 showparents = [ctx.node() for ctx in repo[None].parents()]
518 522 generate(ui, revdag, displayer, showparents, asciiedges)
519 523
520 524 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
521 525
522 526 def uisetup(ui):
523 527 '''Initialize the extension.'''
524 528 _wrapcmd('log', commands.table, graphlog)
525 529 _wrapcmd('incoming', commands.table, gincoming)
526 530 _wrapcmd('outgoing', commands.table, goutgoing)
527 531
528 532 def _wrapcmd(cmd, table, wrapfn):
529 533 '''wrap the command'''
530 534 def graph(orig, *args, **kwargs):
531 535 if kwargs['graph']:
532 536 return wrapfn(*args, **kwargs)
533 537 return orig(*args, **kwargs)
534 538 entry = extensions.wrapcommand(table, cmd, graph)
535 539 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
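For context on the _makelogrevset helper shown above: each supported log option maps to a revset fragment, list-valued options are joined with that option's own operator (' or ' or ' and '), and the resulting fragments are combined with ' and ' into a single expression. The sketch below reproduces that joining logic with a trimmed-down option table; buildrevset and the example option values are hypothetical illustrations, not part of the extension.

# Illustrative reimplementation of the fragment-joining logic (trimmed table).
OPT2REVSET = {
    'no_merges': ('not merge()', None),
    'keyword':   ('keyword(%(val)r)', ' or '),
    'user':      ('user(%(val)r)', ' or '),
}

def buildrevset(opts):
    parts = []
    for op, val in opts.items():
        if not val or op not in OPT2REVSET:
            continue
        revop, andor = OPT2REVSET[op]
        if '%(val)' not in revop:
            parts.append(revop)
        elif not isinstance(val, list):
            parts.append(revop % {'val': val})
        else:
            parts.append('(' + andor.join(revop % {'val': v} for v in val) + ')')
    if not parts:
        return None
    return '(' + ' and '.join(parts) + ')'

# Example:
#   buildrevset({'no_merges': True, 'user': ['alice', 'bob']})
# returns:
#   "(not merge() and (user('alice') or user('bob')))"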