graphlog: extract ascii drawing code into graphmod
Patrick Mezard
r17179:0849d725 default
@@ -1,582 +1,379 @@
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.i18n import _
17 17 from mercurial import cmdutil, commands, extensions, scmutil
18 18 from mercurial import hg, util, graphmod, templatekw, revset
19 19
20 20 cmdtable = {}
21 21 command = cmdutil.command(cmdtable)
22 22 testedwith = 'internal'
23 23
24 def asciiedges(type, char, lines, seen, rev, parents):
25 """adds edge info to changelog DAG walk suitable for ascii()"""
26 if rev not in seen:
27 seen.append(rev)
28 nodeidx = seen.index(rev)
29
30 knownparents = []
31 newparents = []
32 for parent in parents:
33 if parent in seen:
34 knownparents.append(parent)
35 else:
36 newparents.append(parent)
37
38 ncols = len(seen)
39 nextseen = seen[:]
40 nextseen[nodeidx:nodeidx + 1] = newparents
41 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
42
43 while len(newparents) > 2:
44 # ascii() only knows how to add or remove a single column between two
45 # calls. Nodes with more than two parents break this constraint so we
46 # introduce intermediate expansion lines to grow the active node list
47 # slowly.
48 edges.append((nodeidx, nodeidx))
49 edges.append((nodeidx, nodeidx + 1))
50 nmorecols = 1
51 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
52 char = '\\'
53 lines = []
54 nodeidx += 1
55 ncols += 1
56 edges = []
57 del newparents[0]
58
59 if len(newparents) > 0:
60 edges.append((nodeidx, nodeidx))
61 if len(newparents) > 1:
62 edges.append((nodeidx, nodeidx + 1))
63 nmorecols = len(nextseen) - ncols
64 seen[:] = nextseen
65 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
66
67 def _fixlongrightedges(edges):
68 for (i, (start, end)) in enumerate(edges):
69 if end > start:
70 edges[i] = (start, end + 1)
71
72 def _getnodelineedgestail(
73 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
74 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
75 # Still going in the same non-vertical direction.
76 if n_columns_diff == -1:
77 start = max(node_index + 1, p_node_index)
78 tail = ["|", " "] * (start - node_index - 1)
79 tail.extend(["/", " "] * (n_columns - start))
80 return tail
81 else:
82 return ["\\", " "] * (n_columns - node_index - 1)
83 else:
84 return ["|", " "] * (n_columns - node_index - 1)
85
86 def _drawedges(edges, nodeline, interline):
87 for (start, end) in edges:
88 if start == end + 1:
89 interline[2 * end + 1] = "/"
90 elif start == end - 1:
91 interline[2 * start + 1] = "\\"
92 elif start == end:
93 interline[2 * start] = "|"
94 else:
95 if 2 * end >= len(nodeline):
96 continue
97 nodeline[2 * end] = "+"
98 if start > end:
99 (start, end) = (end, start)
100 for i in range(2 * start + 1, 2 * end):
101 if nodeline[i] != "+":
102 nodeline[i] = "-"
103
104 def _getpaddingline(ni, n_columns, edges):
105 line = []
106 line.extend(["|", " "] * ni)
107 if (ni, ni - 1) in edges or (ni, ni) in edges:
108 # (ni, ni - 1) (ni, ni)
109 # | | | | | | | |
110 # +---o | | o---+
111 # | | c | | c | |
112 # | |/ / | |/ /
113 # | | | | | |
114 c = "|"
115 else:
116 c = " "
117 line.extend([c, " "])
118 line.extend(["|", " "] * (n_columns - ni - 1))
119 return line
120
121 def asciistate():
122 """returns the initial value for the "state" argument to ascii()"""
123 return [0, 0]
124
125 def ascii(ui, state, type, char, text, coldata):
126 """prints an ASCII graph of the DAG
127
128 takes the following arguments (one call per node in the graph):
129
130 - ui to write to
131 - Somewhere to keep the needed state in (init to asciistate())
132 - Column of the current node in the set of ongoing edges.
133 - Type indicator of node data, usually 'C' for changesets.
134 - Payload: (char, lines):
135 - Character to use as node's symbol.
136 - List of lines to display as the node's text.
137 - Edges; a list of (col, next_col) indicating the edges between
138 the current node and its parents.
139 - Number of columns (ongoing edges) in the current revision.
140 - The difference between the number of columns (ongoing edges)
141 in the next revision and the number of columns (ongoing edges)
142 in the current revision. That is: -1 means one column removed;
143 0 means no columns added or removed; 1 means one column added.
144 """
145
146 idx, edges, ncols, coldiff = coldata
147 assert -2 < coldiff < 2
148 if coldiff == -1:
149 # Transform
150 #
151 # | | | | | |
152 # o | | into o---+
153 # |X / |/ /
154 # | | | |
155 _fixlongrightedges(edges)
156
157 # add_padding_line says whether to rewrite
158 #
159 # | | | | | | | |
160 # | o---+ into | o---+
161 # | / / | | | # <--- padding line
162 # o | | | / /
163 # o | |
164 add_padding_line = (len(text) > 2 and coldiff == -1 and
165 [x for (x, y) in edges if x + 1 < y])
166
167 # fix_nodeline_tail says whether to rewrite
168 #
169 # | | o | | | | o | |
170 # | | |/ / | | |/ /
171 # | o | | into | o / / # <--- fixed nodeline tail
172 # | |/ / | |/ /
173 # o | | o | |
174 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
175
176 # nodeline is the line containing the node character (typically o)
177 nodeline = ["|", " "] * idx
178 nodeline.extend([char, " "])
179
180 nodeline.extend(
181 _getnodelineedgestail(idx, state[1], ncols, coldiff,
182 state[0], fix_nodeline_tail))
183
184 # shift_interline is the line containing the non-vertical
185 # edges between this entry and the next
186 shift_interline = ["|", " "] * idx
187 if coldiff == -1:
188 n_spaces = 1
189 edge_ch = "/"
190 elif coldiff == 0:
191 n_spaces = 2
192 edge_ch = "|"
193 else:
194 n_spaces = 3
195 edge_ch = "\\"
196 shift_interline.extend(n_spaces * [" "])
197 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
198
199 # draw edges from the current node to its parents
200 _drawedges(edges, nodeline, shift_interline)
201
202 # lines is the list of all graph lines to print
203 lines = [nodeline]
204 if add_padding_line:
205 lines.append(_getpaddingline(idx, ncols, edges))
206 lines.append(shift_interline)
207
208 # make sure that there are as many graph lines as there are
209 # log strings
210 while len(text) < len(lines):
211 text.append("")
212 if len(lines) < len(text):
213 extra_interline = ["|", " "] * (ncols + coldiff)
214 while len(lines) < len(text):
215 lines.append(extra_interline)
216
217 # print lines
218 indentation_level = max(ncols, ncols + coldiff)
219 for (line, logstr) in zip(lines, text):
220 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
221 ui.write(ln.rstrip() + '\n')
222
223 # ... and start over
224 state[0] = coldiff
225 state[1] = idx
226
227 24 def _checkunsupportedflags(pats, opts):
228 25 for op in ["newest_first"]:
229 26 if op in opts and opts[op]:
230 27 raise util.Abort(_("-G/--graph option is incompatible with --%s")
231 28 % op.replace("_", "-"))
232 29
233 30 def _makefilematcher(repo, pats, followfirst):
234 31 # When displaying a revision with --patch --follow FILE, we have
235 32 # to know which file of the revision must be diffed. With
236 33 # --follow, we want the names of the ancestors of FILE in the
237 34 # revision, stored in "fcache". "fcache" is populated by
238 35 # reproducing the graph traversal already done by --follow revset
239 36 # and relating linkrevs to file names (which is not "correct" but
240 37 # good enough).
241 38 fcache = {}
242 39 fcacheready = [False]
243 40 pctx = repo['.']
244 41 wctx = repo[None]
245 42
246 43 def populate():
247 44 for fn in pats:
248 45 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
249 46 for c in i:
250 47 fcache.setdefault(c.linkrev(), set()).add(c.path())
251 48
252 49 def filematcher(rev):
253 50 if not fcacheready[0]:
254 51 # Lazy initialization
255 52 fcacheready[0] = True
256 53 populate()
257 54 return scmutil.match(wctx, fcache.get(rev, []), default='path')
258 55
259 56 return filematcher
260 57
261 58 def _makelogrevset(repo, pats, opts, revs):
262 59 """Return (expr, filematcher) where expr is a revset string built
263 60 from log options and file patterns or None. If --stat or --patch
264 61 are not passed filematcher is None. Otherwise it is a callable
265 62     taking a revision number and returning a match object filtering
266 63 the files to be detailed when displaying the revision.
267 64 """
268 65 opt2revset = {
269 66 'no_merges': ('not merge()', None),
270 67 'only_merges': ('merge()', None),
271 68 '_ancestors': ('ancestors(%(val)s)', None),
272 69 '_fancestors': ('_firstancestors(%(val)s)', None),
273 70 '_descendants': ('descendants(%(val)s)', None),
274 71 '_fdescendants': ('_firstdescendants(%(val)s)', None),
275 72 '_matchfiles': ('_matchfiles(%(val)s)', None),
276 73 'date': ('date(%(val)r)', None),
277 74 'branch': ('branch(%(val)r)', ' or '),
278 75 '_patslog': ('filelog(%(val)r)', ' or '),
279 76 '_patsfollow': ('follow(%(val)r)', ' or '),
280 77 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
281 78 'keyword': ('keyword(%(val)r)', ' or '),
282 79 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
283 80 'user': ('user(%(val)r)', ' or '),
284 81 }
285 82
286 83 opts = dict(opts)
287 84 # follow or not follow?
288 85 follow = opts.get('follow') or opts.get('follow_first')
289 86 followfirst = opts.get('follow_first') and 1 or 0
290 87 # --follow with FILE behaviour depends on revs...
291 88 startrev = revs[0]
292 89 followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
293 90
294 91 # branch and only_branch are really aliases and must be handled at
295 92 # the same time
296 93 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
297 94 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
298 95 # pats/include/exclude are passed to match.match() directly in
299 96 # _matchfile() revset but walkchangerevs() builds its matcher with
300 97 # scmutil.match(). The difference is input pats are globbed on
301 98 # platforms without shell expansion (windows).
302 99 pctx = repo[None]
303 100 match, pats = scmutil.matchandpats(pctx, pats, opts)
304 101 slowpath = match.anypats() or (match.files() and opts.get('removed'))
305 102 if not slowpath:
306 103 for f in match.files():
307 104 if follow and f not in pctx:
308 105 raise util.Abort(_('cannot follow file not in parent '
309 106 'revision: "%s"') % f)
310 107 filelog = repo.file(f)
311 108 if not len(filelog):
312 109 # A zero count may be a directory or deleted file, so
313 110 # try to find matching entries on the slow path.
314 111 if follow:
315 112 raise util.Abort(
316 113 _('cannot follow nonexistent file: "%s"') % f)
317 114 slowpath = True
318 115 if slowpath:
319 116 # See cmdutil.walkchangerevs() slow path.
320 117 #
321 118 if follow:
322 119 raise util.Abort(_('can only follow copies/renames for explicit '
323 120 'filenames'))
324 121 # pats/include/exclude cannot be represented as separate
325 122 # revset expressions as their filtering logic applies at file
326 123 # level. For instance "-I a -X a" matches a revision touching
327 124 # "a" and "b" while "file(a) and not file(b)" does
328 125 # not. Besides, filesets are evaluated against the working
329 126 # directory.
330 127 matchargs = ['r:', 'd:relpath']
331 128 for p in pats:
332 129 matchargs.append('p:' + p)
333 130 for p in opts.get('include', []):
334 131 matchargs.append('i:' + p)
335 132 for p in opts.get('exclude', []):
336 133 matchargs.append('x:' + p)
337 134 matchargs = ','.join(('%r' % p) for p in matchargs)
338 135 opts['_matchfiles'] = matchargs
339 136 else:
340 137 if follow:
341 138 fpats = ('_patsfollow', '_patsfollowfirst')
342 139 fnopats = (('_ancestors', '_fancestors'),
343 140 ('_descendants', '_fdescendants'))
344 141 if pats:
345 142                 # follow() revset interprets its file argument as a
346 143 # manifest entry, so use match.files(), not pats.
347 144 opts[fpats[followfirst]] = list(match.files())
348 145 else:
349 146 opts[fnopats[followdescendants][followfirst]] = str(startrev)
350 147 else:
351 148 opts['_patslog'] = list(pats)
352 149
353 150 filematcher = None
354 151 if opts.get('patch') or opts.get('stat'):
355 152 if follow:
356 153 filematcher = _makefilematcher(repo, pats, followfirst)
357 154 else:
358 155 filematcher = lambda rev: match
359 156
360 157 expr = []
361 158 for op, val in opts.iteritems():
362 159 if not val:
363 160 continue
364 161 if op not in opt2revset:
365 162 continue
366 163 revop, andor = opt2revset[op]
367 164 if '%(val)' not in revop:
368 165 expr.append(revop)
369 166 else:
370 167 if not isinstance(val, list):
371 168 e = revop % {'val': val}
372 169 else:
373 170 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
374 171 expr.append(e)
375 172
376 173 if expr:
377 174 expr = '(' + ' and '.join(expr) + ')'
378 175 else:
379 176 expr = None
380 177 return expr, filematcher
381 178
382 179 def getlogrevs(repo, pats, opts):
383 180 """Return (revs, expr, filematcher) where revs is an iterable of
384 181 revision numbers, expr is a revset string built from log options
385 182 and file patterns or None, and used to filter 'revs'. If --stat or
386 183 --patch are not passed filematcher is None. Otherwise it is a
387 184     callable taking a revision number and returning a match object
388 185 filtering the files to be detailed when displaying the revision.
389 186 """
390 187 def increasingrevs(repo, revs, matcher):
391 188         # The sorted input rev sequence is chopped into sub-sequences
392 189 # which are sorted in ascending order and passed to the
393 190 # matcher. The filtered revs are sorted again as they were in
394 191         # the original sub-sequence. This achieves several things:
395 192 #
396 193         # - getlogrevs() now returns a generator whose behaviour is
397 194         #   adapted to log's needs. First results come fast, later ones
398 195         #   are batched for performance.
399 196 #
400 197         # - revset matchers often operate faster on revisions in
401 198 # changelog order, because most filters deal with the
402 199 # changelog.
403 200 #
404 201 # - revset matchers can reorder revisions. "A or B" typically
405 202         #   returns the revisions matching A then the revisions
406 203 # matching B. We want to hide this internal implementation
407 204         #   detail from the caller, and sorting the filtered revisions
408 205 # again achieves this.
409 206 for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
410 207 orevs = revs[i:i + window]
411 208 nrevs = set(matcher(repo, sorted(orevs)))
412 209 for rev in orevs:
413 210 if rev in nrevs:
414 211 yield rev
415 212
416 213 if not len(repo):
417 214 return iter([]), None, None
418 215 # Default --rev value depends on --follow but --follow behaviour
419 216 # depends on revisions resolved from --rev...
420 217 follow = opts.get('follow') or opts.get('follow_first')
421 218 if opts.get('rev'):
422 219 revs = scmutil.revrange(repo, opts['rev'])
423 220 else:
424 221 if follow and len(repo) > 0:
425 222 revs = scmutil.revrange(repo, ['.:0'])
426 223 else:
427 224 revs = range(len(repo) - 1, -1, -1)
428 225 if not revs:
429 226 return iter([]), None, None
430 227 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
431 228 if expr:
432 229 matcher = revset.match(repo.ui, expr)
433 230 revs = increasingrevs(repo, revs, matcher)
434 231 if not opts.get('hidden'):
435 232 # --hidden is still experimental and not worth a dedicated revset
436 233 # yet. Fortunately, filtering revision number is fast.
437 234 revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
438 235 else:
439 236 revs = iter(revs)
440 237 return revs, expr, filematcher
441 238
442 239 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
443 240 filematcher=None):
444 seen, state = [], asciistate()
241 seen, state = [], graphmod.asciistate()
445 242 for rev, type, ctx, parents in dag:
446 243 char = 'o'
447 244 if ctx.node() in showparents:
448 245 char = '@'
449 246 elif ctx.obsolete():
450 247 char = 'x'
451 248 copies = None
452 249 if getrenamed and ctx.rev():
453 250 copies = []
454 251 for fn in ctx.files():
455 252 rename = getrenamed(fn, ctx.rev())
456 253 if rename:
457 254 copies.append((fn, rename[0]))
458 255 revmatchfn = None
459 256 if filematcher is not None:
460 257 revmatchfn = filematcher(ctx.rev())
461 258 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
462 259 lines = displayer.hunk.pop(rev).split('\n')
463 260 if not lines[-1]:
464 261 del lines[-1]
465 262 displayer.flush(rev)
466 263 edges = edgefn(type, char, lines, seen, rev, parents)
467 264 for type, char, lines, coldata in edges:
468 ascii(ui, state, type, char, lines, coldata)
265 graphmod.ascii(ui, state, type, char, lines, coldata)
469 266 displayer.close()
470 267
471 268 @command('glog',
472 269 [('f', 'follow', None,
473 270 _('follow changeset history, or file history across copies and renames')),
474 271 ('', 'follow-first', None,
475 272 _('only follow the first parent of merge changesets (DEPRECATED)')),
476 273 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
477 274 ('C', 'copies', None, _('show copied files')),
478 275 ('k', 'keyword', [],
479 276 _('do case-insensitive search for a given text'), _('TEXT')),
480 277 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
481 278 ('', 'removed', None, _('include revisions where files were removed')),
482 279 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
483 280 ('u', 'user', [], _('revisions committed by user'), _('USER')),
484 281 ('', 'only-branch', [],
485 282 _('show only changesets within the given named branch (DEPRECATED)'),
486 283 _('BRANCH')),
487 284 ('b', 'branch', [],
488 285 _('show changesets within the given named branch'), _('BRANCH')),
489 286 ('P', 'prune', [],
490 287 _('do not display revision or any of its ancestors'), _('REV')),
491 288 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
492 289 ] + commands.logopts + commands.walkopts,
493 290 _('[OPTION]... [FILE]'))
494 291 def graphlog(ui, repo, *pats, **opts):
495 292 """show revision history alongside an ASCII revision graph
496 293
497 294 Print a revision history alongside a revision graph drawn with
498 295 ASCII characters.
499 296
500 297 Nodes printed as an @ character are parents of the working
501 298 directory.
502 299 """
503 300
504 301 revs, expr, filematcher = getlogrevs(repo, pats, opts)
505 302 revs = sorted(revs, reverse=1)
506 303 limit = cmdutil.loglimit(opts)
507 304 if limit is not None:
508 305 revs = revs[:limit]
509 306 revdag = graphmod.dagwalker(repo, revs)
510 307
511 308 getrenamed = None
512 309 if opts.get('copies'):
513 310 endrev = None
514 311 if opts.get('rev'):
515 312 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
516 313 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
517 314 displayer = show_changeset(ui, repo, opts, buffered=True)
518 315 showparents = [ctx.node() for ctx in repo[None].parents()]
519 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
520 filematcher)
316 generate(ui, revdag, displayer, showparents, graphmod.asciiedges,
317 getrenamed, filematcher)
521 318
522 319 def graphrevs(repo, nodes, opts):
523 320 limit = cmdutil.loglimit(opts)
524 321 nodes.reverse()
525 322 if limit is not None:
526 323 nodes = nodes[:limit]
527 324 return graphmod.nodes(repo, nodes)
528 325
529 326 def goutgoing(ui, repo, dest=None, **opts):
530 327 """show the outgoing changesets alongside an ASCII revision graph
531 328
532 329 Print the outgoing changesets alongside a revision graph drawn with
533 330 ASCII characters.
534 331
535 332 Nodes printed as an @ character are parents of the working
536 333 directory.
537 334 """
538 335
539 336 _checkunsupportedflags([], opts)
540 337 o = hg._outgoing(ui, repo, dest, opts)
541 338 if o is None:
542 339 return
543 340
544 341 revdag = graphrevs(repo, o, opts)
545 342 displayer = show_changeset(ui, repo, opts, buffered=True)
546 343 showparents = [ctx.node() for ctx in repo[None].parents()]
547 generate(ui, revdag, displayer, showparents, asciiedges)
344 generate(ui, revdag, displayer, showparents, graphmod.asciiedges)
548 345
549 346 def gincoming(ui, repo, source="default", **opts):
550 347 """show the incoming changesets alongside an ASCII revision graph
551 348
552 349 Print the incoming changesets alongside a revision graph drawn with
553 350 ASCII characters.
554 351
555 352 Nodes printed as an @ character are parents of the working
556 353 directory.
557 354 """
558 355 def subreporecurse():
559 356 return 1
560 357
561 358 _checkunsupportedflags([], opts)
562 359 def display(other, chlist, displayer):
563 360 revdag = graphrevs(other, chlist, opts)
564 361 showparents = [ctx.node() for ctx in repo[None].parents()]
565 generate(ui, revdag, displayer, showparents, asciiedges)
362 generate(ui, revdag, displayer, showparents, graphmod.asciiedges)
566 363
567 364 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
568 365
569 366 def uisetup(ui):
570 367 '''Initialize the extension.'''
571 368 _wrapcmd('log', commands.table, graphlog)
572 369 _wrapcmd('incoming', commands.table, gincoming)
573 370 _wrapcmd('outgoing', commands.table, goutgoing)
574 371
575 372 def _wrapcmd(cmd, table, wrapfn):
576 373 '''wrap the command'''
577 374 def graph(orig, *args, **kwargs):
578 375 if kwargs['graph']:
579 376 return wrapfn(*args, **kwargs)
580 377 return orig(*args, **kwargs)
581 378 entry = extensions.wrapcommand(table, cmd, graph)
582 379 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
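The revset plumbing left behind in the extension is untouched by this patch: _makelogrevset() maps log options and file patterns onto the revset fragments listed in opt2revset, and getlogrevs() evaluates the combined expression against the candidate revisions in increasing windows. As a rough worked example (hypothetical invocation, not taken from the patch), an options dict equivalent to running

    hg glog -u alice -k bug

with no file patterns is translated into a revset along the lines of

    ((user('alice')) and (keyword('bug')))

Each list-valued option is first joined with the ' or ' / ' and ' separator declared for it in opt2revset, and the resulting clauses are then combined with ' and '; the clause order depends on dictionary iteration order.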
@@ -1,165 +1,368 @@
1 1 # Revision graph generator for Mercurial
2 2 #
3 3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """supports walking the history as DAGs suitable for graphical output
10 10
11 11 The most basic format we use is that of::
12 12
13 13 (id, type, data, [parentids])
14 14
15 15 The node and parent ids are arbitrary integers which identify a node in the
16 16 context of the graph returned. Type is a constant specifying the node type.
17 17 Data depends on type.
18 18 """
19 19
20 20 from mercurial.node import nullrev
21 21 import util
22 22
23 23 CHANGESET = 'C'
24 24
25 25 def dagwalker(repo, revs):
26 26 """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
27 27
28 28 This generator function walks through revisions (which should be ordered
29 29 from bigger to lower). It returns a tuple for each node. The node and parent
30 30 ids are arbitrary integers which identify a node in the context of the graph
31 31 returned.
32 32 """
33 33 if not revs:
34 34 return
35 35
36 36 cl = repo.changelog
37 37 lowestrev = min(revs)
38 38 gpcache = {}
39 39
40 40 knownrevs = set(revs)
41 41 for rev in revs:
42 42 ctx = repo[rev]
43 43 parents = sorted(set([p.rev() for p in ctx.parents()
44 44 if p.rev() in knownrevs]))
45 45 mpars = [p.rev() for p in ctx.parents() if
46 46 p.rev() != nullrev and p.rev() not in parents]
47 47
48 48 for mpar in mpars:
49 49 gp = gpcache.get(mpar)
50 50 if gp is None:
51 51 gp = gpcache[mpar] = grandparent(cl, lowestrev, revs, mpar)
52 52 if not gp:
53 53 parents.append(mpar)
54 54 else:
55 55 parents.extend(g for g in gp if g not in parents)
56 56
57 57 yield (ctx.rev(), CHANGESET, ctx, parents)
58 58
59 59 def nodes(repo, nodes):
60 60 """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
61 61
62 62 This generator function walks the given nodes. It only returns parents
63 63 that are in nodes, too.
64 64 """
65 65 include = set(nodes)
66 66 for node in nodes:
67 67 ctx = repo[node]
68 68 parents = set([p.rev() for p in ctx.parents() if p.node() in include])
69 69 yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
70 70
71 71 def colored(dag, repo):
72 72 """annotates a DAG with colored edge information
73 73
74 74 For each DAG node this function emits tuples::
75 75
76 76 (id, type, data, (col, color), [(col, nextcol, color)])
77 77
78 78 with the following new elements:
79 79
80 80 - Tuple (col, color) with column and color index for the current node
81 81 - A list of tuples indicating the edges between the current node and its
82 82 parents.
83 83 """
84 84 seen = []
85 85 colors = {}
86 86 newcolor = 1
87 87 config = {}
88 88
89 89 for key, val in repo.ui.configitems('graph'):
90 90 if '.' in key:
91 91 branch, setting = key.rsplit('.', 1)
92 92 # Validation
93 93 if setting == "width" and val.isdigit():
94 94 config.setdefault(branch, {})[setting] = int(val)
95 95 elif setting == "color" and val.isalnum():
96 96 config.setdefault(branch, {})[setting] = val
97 97
98 98 if config:
99 99 getconf = util.lrucachefunc(
100 100 lambda rev: config.get(repo[rev].branch(), {}))
101 101 else:
102 102 getconf = lambda rev: {}
103 103
104 104 for (cur, type, data, parents) in dag:
105 105
106 106 # Compute seen and next
107 107 if cur not in seen:
108 108 seen.append(cur) # new head
109 109 colors[cur] = newcolor
110 110 newcolor += 1
111 111
112 112 col = seen.index(cur)
113 113 color = colors.pop(cur)
114 114 next = seen[:]
115 115
116 116 # Add parents to next
117 117 addparents = [p for p in parents if p not in next]
118 118 next[col:col + 1] = addparents
119 119
120 120 # Set colors for the parents
121 121 for i, p in enumerate(addparents):
122 122 if not i:
123 123 colors[p] = color
124 124 else:
125 125 colors[p] = newcolor
126 126 newcolor += 1
127 127
128 128 # Add edges to the graph
129 129 edges = []
130 130 for ecol, eid in enumerate(seen):
131 131 if eid in next:
132 132 bconf = getconf(eid)
133 133 edges.append((
134 134 ecol, next.index(eid), colors[eid],
135 135 bconf.get('width', -1),
136 136 bconf.get('color', '')))
137 137 elif eid == cur:
138 138 for p in parents:
139 139 bconf = getconf(p)
140 140 edges.append((
141 141 ecol, next.index(p), color,
142 142 bconf.get('width', -1),
143 143 bconf.get('color', '')))
144 144
145 145 # Yield and move on
146 146 yield (cur, type, data, (col, color), edges)
147 147 seen = next
148 148
149 149 def grandparent(cl, lowestrev, roots, head):
150 150     """Return all ancestors of head in roots whose revision is
151 151     greater than or equal to lowestrev.
152 152 """
153 153 pending = set([head])
154 154 seen = set()
155 155 kept = set()
156 156 llowestrev = max(nullrev, lowestrev)
157 157 while pending:
158 158 r = pending.pop()
159 159 if r >= llowestrev and r not in seen:
160 160 if r in roots:
161 161 kept.add(r)
162 162 else:
163 163 pending.update([p for p in cl.parentrevs(r)])
164 164 seen.add(r)
165 165 return sorted(kept)
166
167 def asciiedges(type, char, lines, seen, rev, parents):
168 """adds edge info to changelog DAG walk suitable for ascii()"""
169 if rev not in seen:
170 seen.append(rev)
171 nodeidx = seen.index(rev)
172
173 knownparents = []
174 newparents = []
175 for parent in parents:
176 if parent in seen:
177 knownparents.append(parent)
178 else:
179 newparents.append(parent)
180
181 ncols = len(seen)
182 nextseen = seen[:]
183 nextseen[nodeidx:nodeidx + 1] = newparents
184 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
185
186 while len(newparents) > 2:
187 # ascii() only knows how to add or remove a single column between two
188 # calls. Nodes with more than two parents break this constraint so we
189 # introduce intermediate expansion lines to grow the active node list
190 # slowly.
191 edges.append((nodeidx, nodeidx))
192 edges.append((nodeidx, nodeidx + 1))
193 nmorecols = 1
194 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
195 char = '\\'
196 lines = []
197 nodeidx += 1
198 ncols += 1
199 edges = []
200 del newparents[0]
201
202 if len(newparents) > 0:
203 edges.append((nodeidx, nodeidx))
204 if len(newparents) > 1:
205 edges.append((nodeidx, nodeidx + 1))
206 nmorecols = len(nextseen) - ncols
207 seen[:] = nextseen
208 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
209
210 def _fixlongrightedges(edges):
211 for (i, (start, end)) in enumerate(edges):
212 if end > start:
213 edges[i] = (start, end + 1)
214
215 def _getnodelineedgestail(
216 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
217 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
218 # Still going in the same non-vertical direction.
219 if n_columns_diff == -1:
220 start = max(node_index + 1, p_node_index)
221 tail = ["|", " "] * (start - node_index - 1)
222 tail.extend(["/", " "] * (n_columns - start))
223 return tail
224 else:
225 return ["\\", " "] * (n_columns - node_index - 1)
226 else:
227 return ["|", " "] * (n_columns - node_index - 1)
228
229 def _drawedges(edges, nodeline, interline):
230 for (start, end) in edges:
231 if start == end + 1:
232 interline[2 * end + 1] = "/"
233 elif start == end - 1:
234 interline[2 * start + 1] = "\\"
235 elif start == end:
236 interline[2 * start] = "|"
237 else:
238 if 2 * end >= len(nodeline):
239 continue
240 nodeline[2 * end] = "+"
241 if start > end:
242 (start, end) = (end, start)
243 for i in range(2 * start + 1, 2 * end):
244 if nodeline[i] != "+":
245 nodeline[i] = "-"
246
247 def _getpaddingline(ni, n_columns, edges):
248 line = []
249 line.extend(["|", " "] * ni)
250 if (ni, ni - 1) in edges or (ni, ni) in edges:
251 # (ni, ni - 1) (ni, ni)
252 # | | | | | | | |
253 # +---o | | o---+
254 # | | c | | c | |
255 # | |/ / | |/ /
256 # | | | | | |
257 c = "|"
258 else:
259 c = " "
260 line.extend([c, " "])
261 line.extend(["|", " "] * (n_columns - ni - 1))
262 return line
263
264 def asciistate():
265 """returns the initial value for the "state" argument to ascii()"""
266 return [0, 0]
267
268 def ascii(ui, state, type, char, text, coldata):
269 """prints an ASCII graph of the DAG
270
271 takes the following arguments (one call per node in the graph):
272
273 - ui to write to
274 - Somewhere to keep the needed state in (init to asciistate())
275 - Column of the current node in the set of ongoing edges.
276 - Type indicator of node data, usually 'C' for changesets.
277 - Payload: (char, lines):
278 - Character to use as node's symbol.
279 - List of lines to display as the node's text.
280 - Edges; a list of (col, next_col) indicating the edges between
281 the current node and its parents.
282 - Number of columns (ongoing edges) in the current revision.
283 - The difference between the number of columns (ongoing edges)
284 in the next revision and the number of columns (ongoing edges)
285 in the current revision. That is: -1 means one column removed;
286 0 means no columns added or removed; 1 means one column added.
287 """
288
289 idx, edges, ncols, coldiff = coldata
290 assert -2 < coldiff < 2
291 if coldiff == -1:
292 # Transform
293 #
294 # | | | | | |
295 # o | | into o---+
296 # |X / |/ /
297 # | | | |
298 _fixlongrightedges(edges)
299
300 # add_padding_line says whether to rewrite
301 #
302 # | | | | | | | |
303 # | o---+ into | o---+
304 # | / / | | | # <--- padding line
305 # o | | | / /
306 # o | |
307 add_padding_line = (len(text) > 2 and coldiff == -1 and
308 [x for (x, y) in edges if x + 1 < y])
309
310 # fix_nodeline_tail says whether to rewrite
311 #
312 # | | o | | | | o | |
313 # | | |/ / | | |/ /
314 # | o | | into | o / / # <--- fixed nodeline tail
315 # | |/ / | |/ /
316 # o | | o | |
317 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
318
319 # nodeline is the line containing the node character (typically o)
320 nodeline = ["|", " "] * idx
321 nodeline.extend([char, " "])
322
323 nodeline.extend(
324 _getnodelineedgestail(idx, state[1], ncols, coldiff,
325 state[0], fix_nodeline_tail))
326
327 # shift_interline is the line containing the non-vertical
328 # edges between this entry and the next
329 shift_interline = ["|", " "] * idx
330 if coldiff == -1:
331 n_spaces = 1
332 edge_ch = "/"
333 elif coldiff == 0:
334 n_spaces = 2
335 edge_ch = "|"
336 else:
337 n_spaces = 3
338 edge_ch = "\\"
339 shift_interline.extend(n_spaces * [" "])
340 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
341
342 # draw edges from the current node to its parents
343 _drawedges(edges, nodeline, shift_interline)
344
345 # lines is the list of all graph lines to print
346 lines = [nodeline]
347 if add_padding_line:
348 lines.append(_getpaddingline(idx, ncols, edges))
349 lines.append(shift_interline)
350
351 # make sure that there are as many graph lines as there are
352 # log strings
353 while len(text) < len(lines):
354 text.append("")
355 if len(lines) < len(text):
356 extra_interline = ["|", " "] * (ncols + coldiff)
357 while len(lines) < len(text):
358 lines.append(extra_interline)
359
360 # print lines
361 indentation_level = max(ncols, ncols + coldiff)
362 for (line, logstr) in zip(lines, text):
363 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
364 ui.write(ln.rstrip() + '\n')
365
366 # ... and start over
367 state[0] = coldiff
368 state[1] = idx
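With asciistate(), asciiedges() and ascii() now living in graphmod, the drawing pipeline can be exercised without the graphlog extension at all. Below is a minimal sketch (hypothetical, not part of the patch) that feeds a hand-built DAG in the (id, type, data, [parentids]) form described in the module docstring through the moved functions; the _printer class is an assumption standing in for a ui object, since ascii() as shown above only calls ui.write():

    import sys
    from mercurial import graphmod

    class _printer(object):
        """Stand-in for ui: ascii() only needs a write() method here."""
        def write(self, text):
            sys.stdout.write(text)

    # Hand-built DAG, newest revision first, in the (id, type, data,
    # [parentids]) format; ids are arbitrary integers and data is just
    # the label printed next to each node.
    dag = [
        (3, graphmod.CHANGESET, 'merge of 1 and 2', [1, 2]),
        (2, graphmod.CHANGESET, 'second head', [0]),
        (1, graphmod.CHANGESET, 'first head', [0]),
        (0, graphmod.CHANGESET, 'root', []),
    ]

    ui = _printer()
    seen, state = [], graphmod.asciistate()
    for rev, type, data, parents in dag:
        # asciiedges() turns the parent list into column/edge data and
        # updates 'seen' in place; ascii() renders each resulting row.
        edges = graphmod.asciiedges(type, 'o', [data], seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)

This mirrors generate() in the extension above, which drives the same calls with real changectx objects produced by graphmod.dagwalker().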