##// END OF EJS Templates
graphlog: reduce duplication in --follow code
Patrick Mezard -
r16433:e38b2993 default
parent child Browse files
Show More
@@ -1,569 +1,560 b''
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.i18n import _
17 17 from mercurial.node import nullrev
18 18 from mercurial import cmdutil, commands, extensions, scmutil
19 19 from mercurial import hg, util, graphmod, templatekw, revset
20 20
21 21 cmdtable = {}
22 22 command = cmdutil.command(cmdtable)
23 23
24 24 ASCIIDATA = 'ASC'
25 25
def asciiedges(type, char, lines, seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()"""
    if rev not in seen:
        seen.append(rev)
    idx = seen.index(rev)

    # split parents into those already tracked as columns and new ones
    known = [p for p in parents if p in seen]
    new = [p for p in parents if p not in seen]

    ncols = len(seen)
    upcoming = seen[:]
    upcoming[idx:idx + 1] = new
    edges = [(idx, upcoming.index(p)) for p in known]

    while len(new) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((idx, idx))
        edges.append((idx, idx + 1))
        yield (type, char, lines, (idx, edges, ncols, 1))
        char = '\\'
        lines = []
        idx += 1
        ncols += 1
        edges = []
        del new[0]

    if new:
        edges.append((idx, idx))
    if len(new) > 1:
        edges.append((idx, idx + 1))
    nmorecols = len(upcoming) - ncols
    seen[:] = upcoming
    yield (type, char, lines, (idx, edges, ncols, nmorecols))
68 68
def fix_long_right_edges(edges):
    """Stretch right-going edges (end > start) one column further, in place.

    Called by ascii() when the column count shrinks (coldiff == -1).
    """
    edges[:] = [(start, end + 1) if end > start else (start, end)
                for (start, end) in edges]
73 73
def get_nodeline_edges_tail(
    node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Return the trailing edge characters for a node line.

    When the graph keeps moving in the same non-vertical direction
    (fix_tail and the previous and current column diffs agree and are
    non-zero), the tail is drawn slanted; otherwise it is vertical.
    """
    if not (fix_tail and n_columns_diff == p_diff and n_columns_diff != 0):
        # default: straight vertical continuation
        return ["|", " "] * (n_columns - node_index - 1)
    if n_columns_diff != -1:
        # still expanding to the right
        return ["\\", " "] * (n_columns - node_index - 1)
    # still contracting to the left
    start = max(node_index + 1, p_node_index)
    tail = ["|", " "] * (start - node_index - 1)
    tail.extend(["/", " "] * (n_columns - start))
    return tail
87 87
def draw_edges(edges, nodeline, interline):
    """Render (start, end) column edges into nodeline/interline, in place.

    Adjacent columns get a single slash in the interline; same-column
    edges get a vertical bar; distant columns get a horizontal run of
    dashes ending in a '+' junction on the node line.
    """
    for (start, end) in edges:
        if start == end + 1:
            interline[2 * end + 1] = "/"
        elif start == end - 1:
            interline[2 * start + 1] = "\\"
        elif start == end:
            interline[2 * start] = "|"
        else:
            # long horizontal edge; skip if the target column is off the line
            if 2 * end >= len(nodeline):
                continue
            nodeline[2 * end] = "+"
            lo, hi = min(start, end), max(start, end)
            for pos in range(2 * lo + 1, 2 * hi):
                # do not overwrite junctions drawn by earlier edges
                if nodeline[pos] != "+":
                    nodeline[pos] = "-"
105 105
def get_padding_line(ni, n_columns, edges):
    """Return a padding line for column ni among n_columns columns.

    The middle slot keeps a vertical bar when an edge targets this
    column, e.g.:

        (ni, ni - 1)      (ni, ni)
        | | | |           | | | |
        +---o |           | o---+
        | | c |           | c | |
        | |/ /            | |/ /
        | | |             | | |
    """
    mid = "|" if ((ni, ni - 1) in edges or (ni, ni) in edges) else " "
    return (["|", " "] * ni) + [mid, " "] + (["|", " "] * (n_columns - ni - 1))
122 122
def asciistate():
    """returns the initial value for the "state" argument to ascii()"""
    # [previous coldiff, previous node column index]
    return [0] * 2
126 126
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data == ASCIIDATA.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        fix_long_right_edges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])
    nodeline.extend(
        get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    # spacing and edge glyph depend on whether the column count shrinks
    # (-1), stays put (0) or grows (1)
    n_spaces, edge_ch = {-1: (1, "/"),
                         0: (2, "|"),
                         1: (3, "\\")}[coldiff]
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    draw_edges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(get_padding_line(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over
    state[0] = coldiff
    state[1] = idx
228 228
def get_revs(repo, rev_opt):
    """Return a (maxrev, minrev) pair for a --rev option value.

    Falls back to the whole repository range when no revisions were
    requested, and to (nullrev, nullrev) when the range is empty.
    """
    if not rev_opt:
        return (len(repo) - 1, 0)
    revs = scmutil.revrange(repo, rev_opt)
    if not revs:
        return (nullrev, nullrev)
    return (max(revs), min(revs))
237 237
def check_unsupported_flags(pats, opts):
    """Abort when an option incompatible with -G/--graph was given."""
    for op in ("newest_first",):
        if opts.get(op):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
243 243
def _makefilematcher(repo, pats, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']
    wctx = repo[None]

    def populate():
        # record each file context and all its ancestors under their linkrev
        for fn in pats:
            fctx = pctx[fn]
            fcache.setdefault(fctx.linkrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.match(wctx, fcache.get(rev, []), default='path')

    return filematcher
271 271
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # maps a log option to (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    # normalized to 0/1 so it can index the option name tables below
    followfirst = opts.get('follow_first') and 1 or 0
    # --follow with FILE behaviour depends on revs...
    startrev = revs[0]
    followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfile() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in pctx:
                raise util.Abort(_('cannot follow file not in parent '
                                   'revision: "%s"') % f)
            filelog = repo.file(f)
            if not len(filelog):
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True
    if slowpath:
        # See cmdutil.walkchangerevs() slow path.
        #
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
    else:
        if follow:
            # pick the follow revset by indexing these tables with the
            # 0/1 followfirst/followdescendants flags computed above
            fpats = ('_patsfollow', '_patsfollowfirst')
            fnopats = (('_ancestors', '_fancestors'),
                       ('_descendants', '_fdescendants'))
            if pats:
                opts[fpats[followfirst]] = list(pats)
            else:
                opts[fnopats[followdescendants][followfirst]] = str(startrev)
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        if follow:
            filematcher = _makefilematcher(repo, pats, followfirst)
        else:
            filematcher = lambda rev: match

    # combine all active options into a single revset expression
    expr = []
    for op, val in opts.iteritems():
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
399 390
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is a list of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    if not len(repo):
        return [], None, None
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and len(repo) > 0:
        revs = scmutil.revrange(repo, ['.:0'])
    else:
        revs = range(len(repo) - 1, -1, -1)
    if not revs:
        return [], None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Evaluate revisions in changelog order for performance
        # reasons but preserve the original sequence order in the
        # filtered result.
        matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
        revs = [r for r in revs if r in matched]
    if not opts.get('hidden'):
        # --hidden is still experimental and not worth a dedicated revset
        # yet. Fortunately, filtering revision number is fast.
        hidden = repo.changelog.hiddenrevs
        revs = [r for r in revs if r not in hidden]
    return revs, expr, filematcher
434 425
def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
             filematcher=None):
    """Render every node of *dag* through *displayer* and draw the graph.

    Working-directory parents are drawn with '@', other nodes with 'o'.
    *getrenamed*, when given, supplies copy information; *filematcher*,
    when given, restricts the files detailed per revision.
    """
    seen = []
    state = asciistate()
    for rev, type, ctx, parents in dag:
        char = ctx.node() in showparents and '@' or 'o'
        copies = None
        if getrenamed and ctx.rev():
            # collect (file, source) pairs for renamed files
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')[:-1]
        displayer.flush(rev)
        for type, char, lines, coldata in edgefn(type, char, lines, seen,
                                                 rev, parents):
            ascii(ui, state, type, char, lines, coldata)
    displayer.close()
457 448
@command('glog',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
    ] + commands.logopts + commands.walkopts,
    _('[OPTION]... [FILE]'))
def graphlog(ui, repo, *pats, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    revs = sorted(revs, reverse=1)
    limit = cmdutil.loglimit(opts)
    if limit is not None:
        revs = revs[:limit]
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # bound the copy tracing to the highest requested revision
        endrev = None
        if opts.get('rev'):
            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    showparents = [p.node() for p in repo[None].parents()]
    displayer = show_changeset(ui, repo, opts, buffered=True)
    generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
             filematcher)
508 499
def graphrevs(repo, nodes, opts):
    """Return a graphmod node walker over *nodes*, newest first.

    Note: reverses *nodes* in place, then applies --limit if given.
    """
    limit = cmdutil.loglimit(opts)
    nodes.reverse()
    return graphmod.nodes(repo, nodes if limit is None else nodes[:limit])
515 506
def goutgoing(ui, repo, dest=None, **opts):
    """show the outgoing changesets alongside an ASCII revision graph

    Print the outgoing changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    check_unsupported_flags([], opts)
    outgoing = hg._outgoing(ui, repo, dest, opts)
    if outgoing is None:
        # nothing to push
        return
    showparents = [p.node() for p in repo[None].parents()]
    displayer = show_changeset(ui, repo, opts, buffered=True)
    generate(ui, graphrevs(repo, outgoing, opts), displayer, showparents,
             asciiedges)
535 526
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    check_unsupported_flags([], opts)

    def subreporecurse():
        return 1

    def display(other, chlist, displayer):
        # draw the graph against the remote repository's changesets
        revdag = graphrevs(other, chlist, opts)
        showparents = [p.node() for p in repo[None].parents()]
        generate(ui, revdag, displayer, showparents, asciiedges)

    hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
555 546
def uisetup(ui):
    '''Initialize the extension.'''
    # graft -G/--graph onto the three commands that support it
    for cmd, wrapper in (('log', graphlog),
                         ('incoming', gincoming),
                         ('outgoing', goutgoing)):
        _wrapcmd(cmd, commands.table, wrapper)
561 552
def _wrapcmd(cmd, table, wrapfn):
    '''wrap the command'''
    def graph(orig, *args, **kwargs):
        # fall through to the original command unless --graph was given
        if not kwargs['graph']:
            return orig(*args, **kwargs)
        return wrapfn(*args, **kwargs)
    entry = extensions.wrapcommand(table, cmd, graph)
    entry[1].append(('G', 'graph', None, _("show the revision DAG")))
General Comments 0
You need to be logged in to leave comments. Login now