graphlog: cleanup before code move...
Patrick Mezard
r16412:1a10bee8 default
@@ -1,553 +1,552 @@
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.commands import templateopts
17 17 from mercurial.i18n import _
18 18 from mercurial.node import nullrev
19 19 from mercurial import cmdutil, commands, extensions, scmutil
20 from mercurial import hg, util, graphmod, templatekw
21 from mercurial import revset as revsetmod
20 from mercurial import hg, util, graphmod, templatekw, revset
22 21
23 22 cmdtable = {}
24 23 command = cmdutil.command(cmdtable)
25 24
26 25 ASCIIDATA = 'ASC'
27 26
28 27 def asciiedges(type, char, lines, seen, rev, parents):
29 28 """adds edge info to changelog DAG walk suitable for ascii()"""
30 29 if rev not in seen:
31 30 seen.append(rev)
32 31 nodeidx = seen.index(rev)
33 32
34 33 knownparents = []
35 34 newparents = []
36 35 for parent in parents:
37 36 if parent in seen:
38 37 knownparents.append(parent)
39 38 else:
40 39 newparents.append(parent)
41 40
42 41 ncols = len(seen)
43 42 nextseen = seen[:]
44 43 nextseen[nodeidx:nodeidx + 1] = newparents
45 44 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
46 45
47 46 while len(newparents) > 2:
48 47 # ascii() only knows how to add or remove a single column between two
49 48 # calls. Nodes with more than two parents break this constraint so we
50 49 # introduce intermediate expansion lines to grow the active node list
51 50 # slowly.
52 51 edges.append((nodeidx, nodeidx))
53 52 edges.append((nodeidx, nodeidx + 1))
54 53 nmorecols = 1
55 54 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
56 55 char = '\\'
57 56 lines = []
58 57 nodeidx += 1
59 58 ncols += 1
60 59 edges = []
61 60 del newparents[0]
62 61
63 62 if len(newparents) > 0:
64 63 edges.append((nodeidx, nodeidx))
65 64 if len(newparents) > 1:
66 65 edges.append((nodeidx, nodeidx + 1))
67 66 nmorecols = len(nextseen) - ncols
68 67 seen[:] = nextseen
69 68 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
70 69
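
A quick sketch of the contract between asciiedges() and ascii(): the loop below hand-feeds a tiny three-revision linear history (revision numbers and log text are invented) and prints the coldata tuples it yields. It assumes hgext.graphlog, i.e. this file, is importable from an installed Mercurial of this vintage.

    # a minimal sketch, assuming hgext.graphlog (this file) is on the import path
    from hgext.graphlog import asciiedges

    seen = []
    for rev, parents in [(2, [1]), (1, [0]), (0, [])]:
        text = ['changeset %d' % rev]          # stand-in for the rendered log hunk
        for type, char, lines, coldata in asciiedges('C', 'o', text, seen, rev, parents):
            print coldata                      # (nodeidx, edges, ncols, nmorecols)
    # expected: (0, [(0, 0)], 1, 0) twice, then (0, [], 1, -1) for the root
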
71 70 def fix_long_right_edges(edges):
72 71 for (i, (start, end)) in enumerate(edges):
73 72 if end > start:
74 73 edges[i] = (start, end + 1)
75 74
76 75 def get_nodeline_edges_tail(
77 76 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
78 77 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
79 78 # Still going in the same non-vertical direction.
80 79 if n_columns_diff == -1:
81 80 start = max(node_index + 1, p_node_index)
82 81 tail = ["|", " "] * (start - node_index - 1)
83 82 tail.extend(["/", " "] * (n_columns - start))
84 83 return tail
85 84 else:
86 85 return ["\\", " "] * (n_columns - node_index - 1)
87 86 else:
88 87 return ["|", " "] * (n_columns - node_index - 1)
89 88
90 89 def draw_edges(edges, nodeline, interline):
91 90 for (start, end) in edges:
92 91 if start == end + 1:
93 92 interline[2 * end + 1] = "/"
94 93 elif start == end - 1:
95 94 interline[2 * start + 1] = "\\"
96 95 elif start == end:
97 96 interline[2 * start] = "|"
98 97 else:
99 98 if 2 * end >= len(nodeline):
100 99 continue
101 100 nodeline[2 * end] = "+"
102 101 if start > end:
103 102 (start, end) = (end, start)
104 103 for i in range(2 * start + 1, 2 * end):
105 104 if nodeline[i] != "+":
106 105 nodeline[i] = "-"
107 106
108 107 def get_padding_line(ni, n_columns, edges):
109 108 line = []
110 109 line.extend(["|", " "] * ni)
111 110 if (ni, ni - 1) in edges or (ni, ni) in edges:
112 111 # (ni, ni - 1) (ni, ni)
113 112 # | | | | | | | |
114 113 # +---o | | o---+
115 114 # | | c | | c | |
116 115 # | |/ / | |/ /
117 116 # | | | | | |
118 117 c = "|"
119 118 else:
120 119 c = " "
121 120 line.extend([c, " "])
122 121 line.extend(["|", " "] * (n_columns - ni - 1))
123 122 return line
124 123
125 124 def asciistate():
126 125 """returns the initial value for the "state" argument to ascii()"""
127 126 return [0, 0]
128 127
129 128 def ascii(ui, state, type, char, text, coldata):
130 129 """prints an ASCII graph of the DAG
131 130
132 131 takes the following arguments (one call per node in the graph):
133 132
134 133 - ui to write to
135 134 - Somewhere to keep the needed state in (init to asciistate())
136 135 - Column of the current node in the set of ongoing edges.
137 136 - Type indicator of node data == ASCIIDATA.
138 137 - Payload: (char, lines):
139 138 - Character to use as node's symbol.
140 139 - List of lines to display as the node's text.
141 140 - Edges; a list of (col, next_col) indicating the edges between
142 141 the current node and its parents.
143 142 - Number of columns (ongoing edges) in the current revision.
144 143 - The difference between the number of columns (ongoing edges)
145 144 in the next revision and the number of columns (ongoing edges)
146 145 in the current revision. That is: -1 means one column removed;
147 146 0 means no columns added or removed; 1 means one column added.
148 147 """
149 148
150 149 idx, edges, ncols, coldiff = coldata
151 150 assert -2 < coldiff < 2
152 151 if coldiff == -1:
153 152 # Transform
154 153 #
155 154 # | | | | | |
156 155 # o | | into o---+
157 156 # |X / |/ /
158 157 # | | | |
159 158 fix_long_right_edges(edges)
160 159
161 160 # add_padding_line says whether to rewrite
162 161 #
163 162 # | | | | | | | |
164 163 # | o---+ into | o---+
165 164 # | / / | | | # <--- padding line
166 165 # o | | | / /
167 166 # o | |
168 167 add_padding_line = (len(text) > 2 and coldiff == -1 and
169 168 [x for (x, y) in edges if x + 1 < y])
170 169
171 170 # fix_nodeline_tail says whether to rewrite
172 171 #
173 172 # | | o | | | | o | |
174 173 # | | |/ / | | |/ /
175 174 # | o | | into | o / / # <--- fixed nodeline tail
176 175 # | |/ / | |/ /
177 176 # o | | o | |
178 177 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
179 178
180 179 # nodeline is the line containing the node character (typically o)
181 180 nodeline = ["|", " "] * idx
182 181 nodeline.extend([char, " "])
183 182
184 183 nodeline.extend(
185 184 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
186 185 state[0], fix_nodeline_tail))
187 186
188 187 # shift_interline is the line containing the non-vertical
189 188 # edges between this entry and the next
190 189 shift_interline = ["|", " "] * idx
191 190 if coldiff == -1:
192 191 n_spaces = 1
193 192 edge_ch = "/"
194 193 elif coldiff == 0:
195 194 n_spaces = 2
196 195 edge_ch = "|"
197 196 else:
198 197 n_spaces = 3
199 198 edge_ch = "\\"
200 199 shift_interline.extend(n_spaces * [" "])
201 200 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
202 201
203 202 # draw edges from the current node to its parents
204 203 draw_edges(edges, nodeline, shift_interline)
205 204
206 205 # lines is the list of all graph lines to print
207 206 lines = [nodeline]
208 207 if add_padding_line:
209 208 lines.append(get_padding_line(idx, ncols, edges))
210 209 lines.append(shift_interline)
211 210
212 211 # make sure that there are as many graph lines as there are
213 212 # log strings
214 213 while len(text) < len(lines):
215 214 text.append("")
216 215 if len(lines) < len(text):
217 216 extra_interline = ["|", " "] * (ncols + coldiff)
218 217 while len(lines) < len(text):
219 218 lines.append(extra_interline)
220 219
221 220 # print lines
222 221 indentation_level = max(ncols, ncols + coldiff)
223 222 for (line, logstr) in zip(lines, text):
224 223 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
225 224 ui.write(ln.rstrip() + '\n')
226 225
227 226 # ... and start over
228 227 state[0] = coldiff
229 228 state[1] = idx
230 229
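
Putting asciistate(), asciiedges() and ascii() together, here is a minimal, self-contained rendering sketch. The stub ui provides only the write() method ascii() actually uses, and the forked history shape is invented for the example.

    # a minimal sketch; the history shape and log text are invented
    import sys
    from hgext.graphlog import asciiedges, asciistate, ascii

    class stubui(object):
        # ascii() only calls ui.write(), so a one-method stub is enough here
        def write(self, data):
            sys.stdout.write(data)

    ui = stubui()
    state = asciistate()
    seen = []
    # revisions 2 and 1 both descend from revision 0, so the graph forks and rejoins
    for rev, parents in [(3, [1]), (2, [0]), (1, [0]), (0, [])]:
        text = ['changeset %d' % rev]
        for type, char, lines, coldata in asciiedges('C', 'o', text, seen, rev, parents):
            ascii(ui, state, type, char, lines, coldata)
    # prints something like:
    # o  changeset 3
    # |
    # | o  changeset 2
    # | |
    # o |  changeset 1
    # |/
    # o  changeset 0
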
231 230 def get_revs(repo, rev_opt):
232 231 if rev_opt:
233 232 revs = scmutil.revrange(repo, rev_opt)
234 233 if len(revs) == 0:
235 234 return (nullrev, nullrev)
236 235 return (max(revs), min(revs))
237 236 else:
238 237 return (len(repo) - 1, 0)
239 238
240 239 def check_unsupported_flags(pats, opts):
241 240 for op in ["newest_first"]:
242 241 if op in opts and opts[op]:
243 242 raise util.Abort(_("-G/--graph option is incompatible with --%s")
244 243 % op.replace("_", "-"))
245 244
246 def makefilematcher(repo, pats, followfirst):
245 def _makefilematcher(repo, pats, followfirst):
247 246 # When displaying a revision with --patch --follow FILE, we have
248 247 # to know which file of the revision must be diffed. With
249 248 # --follow, we want the names of the ancestors of FILE in the
250 249 # revision, stored in "fcache". "fcache" is populated by
251 250 # reproducing the graph traversal already done by --follow revset
252 251 # and relating linkrevs to file names (which is not "correct" but
253 252 # good enough).
254 253 fcache = {}
255 254 fcacheready = [False]
256 255 pctx = repo['.']
257 256 wctx = repo[None]
258 257
259 258 def populate():
260 259 for fn in pats:
261 260 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
262 261 for c in i:
263 262 fcache.setdefault(c.linkrev(), set()).add(c.path())
264 263
265 264 def filematcher(rev):
266 265 if not fcacheready[0]:
267 266 # Lazy initialization
268 267 fcacheready[0] = True
269 268 populate()
270 269 return scmutil.match(wctx, fcache.get(rev, []), default='path')
271 270
272 271 return filematcher
273 272
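
A hedged sketch of how the returned filematcher is meant to be consumed; it assumes the current directory is a Mercurial repository with an updated working copy and that the illustrative path below exists in its parent revision.

    # hedged sketch, assuming '.' is a Mercurial repository containing this file
    from mercurial import ui as uimod, hg
    from hgext.graphlog import _makefilematcher

    repo = hg.repository(uimod.ui(), '.')
    filematcher = _makefilematcher(repo, ['hgext/graphlog.py'], followfirst=False)
    m = filematcher(repo['tip'].rev())   # match object naming the file(s) to diff at tip
    print m.files()
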
274 273 def _makelogrevset(repo, pats, opts, revs):
275 274 """Return (expr, filematcher) where expr is a revset string built
276 275 from log options and file patterns or None. If --stat or --patch
277 276 are not passed filematcher is None. Otherwise it is a callable
278 277 taking a revision number and returning a match object filtering
279 278 the files to be detailed when displaying the revision.
280 279 """
281 280 opt2revset = {
282 281 'no_merges': ('not merge()', None),
283 282 'only_merges': ('merge()', None),
284 283 '_ancestors': ('ancestors(%(val)s)', None),
285 284 '_fancestors': ('_firstancestors(%(val)s)', None),
286 285 '_descendants': ('descendants(%(val)s)', None),
287 286 '_fdescendants': ('_firstdescendants(%(val)s)', None),
288 287 '_matchfiles': ('_matchfiles(%(val)s)', None),
289 288 'date': ('date(%(val)r)', None),
290 289 'branch': ('branch(%(val)r)', ' or '),
291 290 '_patslog': ('filelog(%(val)r)', ' or '),
292 291 '_patsfollow': ('follow(%(val)r)', ' or '),
293 292 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
294 293 'keyword': ('keyword(%(val)r)', ' or '),
295 294 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
296 295 'user': ('user(%(val)r)', ' or '),
297 296 }
298 297
299 298 opts = dict(opts)
300 299 # follow or not follow?
301 300 follow = opts.get('follow') or opts.get('follow_first')
302 301 followfirst = opts.get('follow_first')
303 302 # --follow with FILE behaviour depends on revs...
304 303 startrev = revs[0]
305 304 followdescendants = len(revs) > 1 and revs[0] < revs[1]
306 305
307 306 # branch and only_branch are really aliases and must be handled at
308 307 # the same time
309 308 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
310 309 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
311 310 # pats/include/exclude are passed to match.match() directly in
312 311 # _matchfile() revset but walkchangerevs() builds its matcher with
313 312 # scmutil.match(). The difference is input pats are globbed on
314 313 # platforms without shell expansion (windows).
315 314 pctx = repo[None]
316 315 match, pats = scmutil.matchandpats(pctx, pats, opts)
317 316 slowpath = match.anypats() or (match.files() and opts.get('removed'))
318 317 if not slowpath:
319 318 for f in match.files():
320 319 if follow and f not in pctx:
321 320 raise util.Abort(_('cannot follow file not in parent '
322 321 'revision: "%s"') % f)
323 322 filelog = repo.file(f)
324 323 if not len(filelog):
325 324 # A zero count may be a directory or deleted file, so
326 325 # try to find matching entries on the slow path.
327 326 if follow:
328 327 raise util.Abort(
329 328 _('cannot follow nonexistent file: "%s"') % f)
330 329 slowpath = True
331 330 if slowpath:
332 331 # See cmdutil.walkchangerevs() slow path.
333 332 #
334 333 if follow:
335 334 raise util.Abort(_('can only follow copies/renames for explicit '
336 335 'filenames'))
337 336 # pats/include/exclude cannot be represented as separate
338 337 # revset expressions as their filtering logic applies at file
339 338 # level. For instance "-I a -X a" matches a revision touching
340 339 # "a" and "b" while "file(a) and not file(b)" does
341 340 # not. Besides, filesets are evaluated against the working
342 341 # directory.
343 342 matchargs = ['r:', 'd:relpath']
344 343 for p in pats:
345 344 matchargs.append('p:' + p)
346 345 for p in opts.get('include', []):
347 346 matchargs.append('i:' + p)
348 347 for p in opts.get('exclude', []):
349 348 matchargs.append('x:' + p)
350 349 matchargs = ','.join(('%r' % p) for p in matchargs)
351 350 opts['_matchfiles'] = matchargs
352 351 else:
353 352 if follow:
354 353 if followfirst:
355 354 if pats:
356 355 opts['_patsfollowfirst'] = list(pats)
357 356 else:
358 357 if followdescendants:
359 358 opts['_fdescendants'] = str(startrev)
360 359 else:
361 360 opts['_fancestors'] = str(startrev)
362 361 else:
363 362 if pats:
364 363 opts['_patsfollow'] = list(pats)
365 364 else:
366 365 if followdescendants:
367 366 opts['_descendants'] = str(startrev)
368 367 else:
369 368 opts['_ancestors'] = str(startrev)
370 369 else:
371 370 opts['_patslog'] = list(pats)
372 371
373 372 filematcher = None
374 373 if opts.get('patch') or opts.get('stat'):
375 374 if follow:
376 filematcher = makefilematcher(repo, pats, followfirst)
375 filematcher = _makefilematcher(repo, pats, followfirst)
377 376 else:
378 377 filematcher = lambda rev: match
379 378
380 revset = []
379 expr = []
381 380 for op, val in opts.iteritems():
382 381 if not val:
383 382 continue
384 383 if op not in opt2revset:
385 384 continue
386 385 revop, andor = opt2revset[op]
387 386 if '%(val)' not in revop:
388 revset.append(revop)
387 expr.append(revop)
389 388 else:
390 389 if not isinstance(val, list):
391 expr = revop % {'val': val}
390 e = revop % {'val': val}
392 391 else:
393 expr = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
394 revset.append(expr)
392 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
393 expr.append(e)
395 394
396 if revset:
397 revset = '(' + ' and '.join(revset) + ')'
395 if expr:
396 expr = '(' + ' and '.join(expr) + ')'
398 397 else:
399 revset = None
400 return revset, filematcher
398 expr = None
399 return expr, filematcher
401 400
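
To illustrate what _makelogrevset() produces, the sketch below turns a couple of invented log options into a revset string. It assumes a non-empty Mercurial repository in the current directory; the exact clause order depends on dict iteration order.

    # hedged sketch, assuming the current directory is a non-empty Mercurial repository
    from mercurial import ui as uimod, hg
    from hgext.graphlog import _makelogrevset

    repo = hg.repository(uimod.ui(), '.')
    revs = range(len(repo) - 1, -1, -1)               # newest to oldest, as getlogrevs() does
    opts = {'keyword': ['graph'], 'no_merges': True}  # invented example options
    expr, filematcher = _makelogrevset(repo, [], opts, revs)
    print expr          # e.g. "((keyword('graph')) and not merge())", clause order may vary
    print filematcher   # None, since neither --patch nor --stat was asked for
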
402 401 def getlogrevs(repo, pats, opts):
403 402 """Return (revs, expr, filematcher) where revs is a list of
404 403 revision numbers, expr is a revset string built from log options
405 404 and file patterns or None, and used to filter 'revs'. If --stat or
406 405 --patch are not passed filematcher is None. Otherwise it is a
407 406 callable taking a revision number and returning a match object
408 407 filtering the files to be detailed when displaying the revision.
409 408 """
410 409 if not len(repo):
411 410 return [], None, None
412 411 # Default --rev value depends on --follow but --follow behaviour
413 412 # depends on revisions resolved from --rev...
414 413 follow = opts.get('follow') or opts.get('follow_first')
415 414 if opts.get('rev'):
416 415 revs = scmutil.revrange(repo, opts['rev'])
417 416 else:
418 417 if follow and len(repo) > 0:
419 418 revs = scmutil.revrange(repo, ['.:0'])
420 419 else:
421 420 revs = range(len(repo) - 1, -1, -1)
422 421 if not revs:
423 422 return [], None, None
424 423 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
425 424 if expr:
426 425 # Evaluate revisions in changelog order for performance
427 426 # reasons but preserve the original sequence order in the
428 427 # filtered result.
429 matched = set(revsetmod.match(repo.ui, expr)(repo, sorted(revs)))
428 matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
430 429 revs = [r for r in revs if r in matched]
431 430 return revs, expr, filematcher
432 431
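
And a short usage sketch of the three-value contract getlogrevs() documents above, under the same assumptions (non-empty repository in the current directory, invented --keyword value):

    # hedged usage sketch for getlogrevs()
    from mercurial import ui as uimod, hg
    from hgext.graphlog import getlogrevs

    repo = hg.repository(uimod.ui(), '.')
    revs, expr, filematcher = getlogrevs(repo, [], {'keyword': ['graph'], 'patch': True})
    print len(revs), expr            # filtered revisions (newest first) and the revset used
    if revs:
        # with no file patterns, this matcher simply matches every file at that revision
        print filematcher(revs[0])
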
433 432 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
434 433 filematcher=None):
435 434 seen, state = [], asciistate()
436 435 for rev, type, ctx, parents in dag:
437 436 char = ctx.node() in showparents and '@' or 'o'
438 437 copies = None
439 438 if getrenamed and ctx.rev():
440 439 copies = []
441 440 for fn in ctx.files():
442 441 rename = getrenamed(fn, ctx.rev())
443 442 if rename:
444 443 copies.append((fn, rename[0]))
445 444 revmatchfn = None
446 445 if filematcher is not None:
447 446 revmatchfn = filematcher(ctx.rev())
448 447 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
449 448 lines = displayer.hunk.pop(rev).split('\n')[:-1]
450 449 displayer.flush(rev)
451 450 edges = edgefn(type, char, lines, seen, rev, parents)
452 451 for type, char, lines, coldata in edges:
453 452 ascii(ui, state, type, char, lines, coldata)
454 453 displayer.close()
455 454
456 455 @command('glog',
457 456 [('l', 'limit', '',
458 457 _('limit number of changes displayed'), _('NUM')),
459 458 ('p', 'patch', False, _('show patch')),
460 459 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
461 460 ] + templateopts,
462 461 _('hg glog [OPTION]... [FILE]'))
463 462 def graphlog(ui, repo, *pats, **opts):
464 463 """show revision history alongside an ASCII revision graph
465 464
466 465 Print a revision history alongside a revision graph drawn with
467 466 ASCII characters.
468 467
469 468 Nodes printed as an @ character are parents of the working
470 469 directory.
471 470 """
472 471
473 472 check_unsupported_flags(pats, opts)
474 473
475 474 revs, expr, filematcher = getlogrevs(repo, pats, opts)
476 475 revs = sorted(revs, reverse=1)
477 476 limit = cmdutil.loglimit(opts)
478 477 if limit is not None:
479 478 revs = revs[:limit]
480 479 revdag = graphmod.dagwalker(repo, revs)
481 480
482 481 getrenamed = None
483 482 if opts.get('copies'):
484 483 endrev = None
485 484 if opts.get('rev'):
486 485 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
487 486 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
488 487 displayer = show_changeset(ui, repo, opts, buffered=True)
489 488 showparents = [ctx.node() for ctx in repo[None].parents()]
490 489 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
491 490 filematcher)
492 491
493 492 def graphrevs(repo, nodes, opts):
494 493 limit = cmdutil.loglimit(opts)
495 494 nodes.reverse()
496 495 if limit is not None:
497 496 nodes = nodes[:limit]
498 497 return graphmod.nodes(repo, nodes)
499 498
500 499 def goutgoing(ui, repo, dest=None, **opts):
501 500 """show the outgoing changesets alongside an ASCII revision graph
502 501
503 502 Print the outgoing changesets alongside a revision graph drawn with
504 503 ASCII characters.
505 504
506 505 Nodes printed as an @ character are parents of the working
507 506 directory.
508 507 """
509 508
510 509 check_unsupported_flags([], opts)
511 510 o = hg._outgoing(ui, repo, dest, opts)
512 511 if o is None:
513 512 return
514 513
515 514 revdag = graphrevs(repo, o, opts)
516 515 displayer = show_changeset(ui, repo, opts, buffered=True)
517 516 showparents = [ctx.node() for ctx in repo[None].parents()]
518 517 generate(ui, revdag, displayer, showparents, asciiedges)
519 518
520 519 def gincoming(ui, repo, source="default", **opts):
521 520 """show the incoming changesets alongside an ASCII revision graph
522 521
523 522 Print the incoming changesets alongside a revision graph drawn with
524 523 ASCII characters.
525 524
526 525 Nodes printed as an @ character are parents of the working
527 526 directory.
528 527 """
529 528 def subreporecurse():
530 529 return 1
531 530
532 531 check_unsupported_flags([], opts)
533 532 def display(other, chlist, displayer):
534 533 revdag = graphrevs(other, chlist, opts)
535 534 showparents = [ctx.node() for ctx in repo[None].parents()]
536 535 generate(ui, revdag, displayer, showparents, asciiedges)
537 536
538 537 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
539 538
540 539 def uisetup(ui):
541 540 '''Initialize the extension.'''
542 541 _wrapcmd('log', commands.table, graphlog)
543 542 _wrapcmd('incoming', commands.table, gincoming)
544 543 _wrapcmd('outgoing', commands.table, goutgoing)
545 544
546 545 def _wrapcmd(cmd, table, wrapfn):
547 546 '''wrap the command'''
548 547 def graph(orig, *args, **kwargs):
549 548 if kwargs['graph']:
550 549 return wrapfn(*args, **kwargs)
551 550 return orig(*args, **kwargs)
552 551 entry = extensions.wrapcommand(table, cmd, graph)
553 552 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
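
The uisetup()/_wrapcmd() pair above is the stock extensions.wrapcommand() pattern. For comparison, a hedged sketch of the same idea as a stand-alone extension; the module name, flag and banner text are invented for illustration.

    # myext.py -- a hedged sketch of the same wrapcommand() pattern, not part of graphlog
    from mercurial import commands, extensions
    from mercurial.i18n import _

    def uisetup(ui):
        entry = extensions.wrapcommand(commands.table, 'log', _shoutlog)
        entry[1].append(('', 'shout', None, _('print a banner before the log')))

    def _shoutlog(orig, ui, repo, *pats, **opts):
        # fall through to the original command unless our new flag was given
        if opts.get('shout'):
            ui.write('*** log follows ***\n')
        return orig(ui, repo, *pats, **opts)
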