graphlog: remove unused ASCIIDATA constant...
Patrick Mezard
r17164:8299a9ad default
@@ -1,584 +1,582
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.i18n import _
17 17 from mercurial import cmdutil, commands, extensions, scmutil
18 18 from mercurial import hg, util, graphmod, templatekw, revset
19 19
20 20 cmdtable = {}
21 21 command = cmdutil.command(cmdtable)
22 22 testedwith = 'internal'
23 23
24 ASCIIDATA = 'ASC'
25
26 24 def asciiedges(type, char, lines, seen, rev, parents):
27 25 """adds edge info to changelog DAG walk suitable for ascii()"""
28 26 if rev not in seen:
29 27 seen.append(rev)
30 28 nodeidx = seen.index(rev)
31 29
32 30 knownparents = []
33 31 newparents = []
34 32 for parent in parents:
35 33 if parent in seen:
36 34 knownparents.append(parent)
37 35 else:
38 36 newparents.append(parent)
39 37
40 38 ncols = len(seen)
41 39 nextseen = seen[:]
42 40 nextseen[nodeidx:nodeidx + 1] = newparents
43 41 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
44 42
45 43 while len(newparents) > 2:
46 44 # ascii() only knows how to add or remove a single column between two
47 45 # calls. Nodes with more than two parents break this constraint so we
48 46 # introduce intermediate expansion lines to grow the active node list
49 47 # slowly.
50 48 edges.append((nodeidx, nodeidx))
51 49 edges.append((nodeidx, nodeidx + 1))
52 50 nmorecols = 1
53 51 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
54 52 char = '\\'
55 53 lines = []
56 54 nodeidx += 1
57 55 ncols += 1
58 56 edges = []
59 57 del newparents[0]
60 58
61 59 if len(newparents) > 0:
62 60 edges.append((nodeidx, nodeidx))
63 61 if len(newparents) > 1:
64 62 edges.append((nodeidx, nodeidx + 1))
65 63 nmorecols = len(nextseen) - ncols
66 64 seen[:] = nextseen
67 65 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
68 66
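
As an illustration only (not part of this changeset), the sketch below feeds asciiedges() a hand-made merge node and inspects the yielded column data; it assumes the function can be imported as hgext.graphlog.asciiedges, and the values in the comments are traced from the code above.

from hgext.graphlog import asciiedges

# One ongoing column, currently tracking rev 3; rev 3 merges revs 2 and 1.
seen = [3]
out = list(asciiedges('C', 'o', ['rev 3'], seen, 3, [2, 1]))

# A two-parent node yields a single tuple; nodes with more than two parents
# yield extra "expansion" tuples, as described in the loop above.
nodeidx, edges, ncols, nmorecols = out[0][3]
print((nodeidx, edges, ncols, nmorecols))   # (0, [(0, 0), (0, 1)], 1, 1)
print(seen)                                 # [2, 1] -- columns now track the parents
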
69 67 def _fixlongrightedges(edges):
70 68 for (i, (start, end)) in enumerate(edges):
71 69 if end > start:
72 70 edges[i] = (start, end + 1)
73 71
74 72 def _getnodelineedgestail(
75 73 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
76 74 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
77 75 # Still going in the same non-vertical direction.
78 76 if n_columns_diff == -1:
79 77 start = max(node_index + 1, p_node_index)
80 78 tail = ["|", " "] * (start - node_index - 1)
81 79 tail.extend(["/", " "] * (n_columns - start))
82 80 return tail
83 81 else:
84 82 return ["\\", " "] * (n_columns - node_index - 1)
85 83 else:
86 84 return ["|", " "] * (n_columns - node_index - 1)
87 85
88 86 def _drawedges(edges, nodeline, interline):
89 87 for (start, end) in edges:
90 88 if start == end + 1:
91 89 interline[2 * end + 1] = "/"
92 90 elif start == end - 1:
93 91 interline[2 * start + 1] = "\\"
94 92 elif start == end:
95 93 interline[2 * start] = "|"
96 94 else:
97 95 if 2 * end >= len(nodeline):
98 96 continue
99 97 nodeline[2 * end] = "+"
100 98 if start > end:
101 99 (start, end) = (end, start)
102 100 for i in range(2 * start + 1, 2 * end):
103 101 if nodeline[i] != "+":
104 102 nodeline[i] = "-"
105 103
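
For a concrete picture of what _drawedges() does (purely illustrative, not part of the changeset), the sketch below connects column 0 to column 2 on a three-column node line, producing the o---+ shape that appears in the diagrams further down; it assumes hgext.graphlog is importable.

from hgext.graphlog import _drawedges

# Three ongoing columns; the node character sits in column 0.
nodeline = ['o', ' ', '|', ' ', '|', ' ']
interline = ['|', ' ', '|', ' ', '|', ' ']
_drawedges([(0, 2)], nodeline, interline)    # long edge from column 0 to 2
print(''.join(nodeline).rstrip())            # o---+
print(''.join(interline).rstrip())           # | | |
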
106 104 def _getpaddingline(ni, n_columns, edges):
107 105 line = []
108 106 line.extend(["|", " "] * ni)
109 107 if (ni, ni - 1) in edges or (ni, ni) in edges:
110 108 # (ni, ni - 1) (ni, ni)
111 109 # | | | | | | | |
112 110 # +---o | | o---+
113 111 # | | c | | c | |
114 112 # | |/ / | |/ /
115 113 # | | | | | |
116 114 c = "|"
117 115 else:
118 116 c = " "
119 117 line.extend([c, " "])
120 118 line.extend(["|", " "] * (n_columns - ni - 1))
121 119 return line
122 120
123 121 def asciistate():
124 122 """returns the initial value for the "state" argument to ascii()"""
125 123 return [0, 0]
126 124
127 125 def ascii(ui, state, type, char, text, coldata):
128 126 """prints an ASCII graph of the DAG
129 127
130 128 takes the following arguments (one call per node in the graph):
131 129
132 130 - ui to write to
133 131 - Somewhere to keep the needed state in (init to asciistate())
134 132 - Column of the current node in the set of ongoing edges.
135 - Type indicator of node data == ASCIIDATA.
133 - Type indicator of node data, usually 'C' for changesets.
136 134 - Payload: (char, lines):
137 135 - Character to use as node's symbol.
138 136 - List of lines to display as the node's text.
139 137 - Edges; a list of (col, next_col) indicating the edges between
140 138 the current node and its parents.
141 139 - Number of columns (ongoing edges) in the current revision.
142 140 - The difference between the number of columns (ongoing edges)
143 141 in the next revision and the number of columns (ongoing edges)
144 142 in the current revision. That is: -1 means one column removed;
145 143 0 means no columns added or removed; 1 means one column added.
146 144 """
147 145
148 146 idx, edges, ncols, coldiff = coldata
149 147 assert -2 < coldiff < 2
150 148 if coldiff == -1:
151 149 # Transform
152 150 #
153 151 # | | | | | |
154 152 # o | | into o---+
155 153 # |X / |/ /
156 154 # | | | |
157 155 _fixlongrightedges(edges)
158 156
159 157 # add_padding_line says whether to rewrite
160 158 #
161 159 # | | | | | | | |
162 160 # | o---+ into | o---+
163 161 # | / / | | | # <--- padding line
164 162 # o | | | / /
165 163 # o | |
166 164 add_padding_line = (len(text) > 2 and coldiff == -1 and
167 165 [x for (x, y) in edges if x + 1 < y])
168 166
169 167 # fix_nodeline_tail says whether to rewrite
170 168 #
171 169 # | | o | | | | o | |
172 170 # | | |/ / | | |/ /
173 171 # | o | | into | o / / # <--- fixed nodeline tail
174 172 # | |/ / | |/ /
175 173 # o | | o | |
176 174 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
177 175
178 176 # nodeline is the line containing the node character (typically o)
179 177 nodeline = ["|", " "] * idx
180 178 nodeline.extend([char, " "])
181 179
182 180 nodeline.extend(
183 181 _getnodelineedgestail(idx, state[1], ncols, coldiff,
184 182 state[0], fix_nodeline_tail))
185 183
186 184 # shift_interline is the line containing the non-vertical
187 185 # edges between this entry and the next
188 186 shift_interline = ["|", " "] * idx
189 187 if coldiff == -1:
190 188 n_spaces = 1
191 189 edge_ch = "/"
192 190 elif coldiff == 0:
193 191 n_spaces = 2
194 192 edge_ch = "|"
195 193 else:
196 194 n_spaces = 3
197 195 edge_ch = "\\"
198 196 shift_interline.extend(n_spaces * [" "])
199 197 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
200 198
201 199 # draw edges from the current node to its parents
202 200 _drawedges(edges, nodeline, shift_interline)
203 201
204 202 # lines is the list of all graph lines to print
205 203 lines = [nodeline]
206 204 if add_padding_line:
207 205 lines.append(_getpaddingline(idx, ncols, edges))
208 206 lines.append(shift_interline)
209 207
210 208 # make sure that there are as many graph lines as there are
211 209 # log strings
212 210 while len(text) < len(lines):
213 211 text.append("")
214 212 if len(lines) < len(text):
215 213 extra_interline = ["|", " "] * (ncols + coldiff)
216 214 while len(lines) < len(text):
217 215 lines.append(extra_interline)
218 216
219 217 # print lines
220 218 indentation_level = max(ncols, ncols + coldiff)
221 219 for (line, logstr) in zip(lines, text):
222 220 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
223 221 ui.write(ln.rstrip() + '\n')
224 222
225 223 # ... and start over
226 224 state[0] = coldiff
227 225 state[1] = idx
228 226
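
To make the asciistate()/ascii() calling convention concrete, here is a minimal sketch (not part of this changeset) that renders a three-node linear history through a stub ui object; it assumes hgext.graphlog is importable and mirrors the inner loop of generate() further down.

import sys
from hgext.graphlog import ascii, asciiedges, asciistate

class stubui(object):
    # ascii() only needs a write() method from the ui it is given.
    def write(self, text):
        sys.stdout.write(text)

# Linear DAG, newest first: 2 -> 1 -> 0, with one log line per node.
dag = [(2, 'C', ['changeset 2'], [1]),
       (1, 'C', ['changeset 1'], [0]),
       (0, 'C', ['changeset 0'], [])]

ui = stubui()
seen, state = [], asciistate()
for rev, type, lines, parents in dag:
    for t, char, ls, coldata in asciiedges(type, 'o', lines, seen, rev, parents):
        ascii(ui, state, t, char, ls, coldata)
# Prints "o  changeset 2", "|", "o  changeset 1", "|", "o  changeset 0",
# followed by a blank line for the root.
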
229 227 def _checkunsupportedflags(pats, opts):
230 228 for op in ["newest_first"]:
231 229 if op in opts and opts[op]:
232 230 raise util.Abort(_("-G/--graph option is incompatible with --%s")
233 231 % op.replace("_", "-"))
234 232
235 233 def _makefilematcher(repo, pats, followfirst):
236 234 # When displaying a revision with --patch --follow FILE, we have
237 235 # to know which file of the revision must be diffed. With
238 236 # --follow, we want the names of the ancestors of FILE in the
239 237 # revision, stored in "fcache". "fcache" is populated by
240 238 # reproducing the graph traversal already done by --follow revset
241 239 # and relating linkrevs to file names (which is not "correct" but
242 240 # good enough).
243 241 fcache = {}
244 242 fcacheready = [False]
245 243 pctx = repo['.']
246 244 wctx = repo[None]
247 245
248 246 def populate():
249 247 for fn in pats:
250 248 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
251 249 for c in i:
252 250 fcache.setdefault(c.linkrev(), set()).add(c.path())
253 251
254 252 def filematcher(rev):
255 253 if not fcacheready[0]:
256 254 # Lazy initialization
257 255 fcacheready[0] = True
258 256 populate()
259 257 return scmutil.match(wctx, fcache.get(rev, []), default='path')
260 258
261 259 return filematcher
262 260
263 261 def _makelogrevset(repo, pats, opts, revs):
264 262 """Return (expr, filematcher) where expr is a revset string built
265 263 from log options and file patterns or None. If --stat or --patch
266 264 are not passed filematcher is None. Otherwise it is a callable
267 265 taking a revision number and returning a match object filtering
268 266 the files to be detailed when displaying the revision.
269 267 """
270 268 opt2revset = {
271 269 'no_merges': ('not merge()', None),
272 270 'only_merges': ('merge()', None),
273 271 '_ancestors': ('ancestors(%(val)s)', None),
274 272 '_fancestors': ('_firstancestors(%(val)s)', None),
275 273 '_descendants': ('descendants(%(val)s)', None),
276 274 '_fdescendants': ('_firstdescendants(%(val)s)', None),
277 275 '_matchfiles': ('_matchfiles(%(val)s)', None),
278 276 'date': ('date(%(val)r)', None),
279 277 'branch': ('branch(%(val)r)', ' or '),
280 278 '_patslog': ('filelog(%(val)r)', ' or '),
281 279 '_patsfollow': ('follow(%(val)r)', ' or '),
282 280 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
283 281 'keyword': ('keyword(%(val)r)', ' or '),
284 282 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
285 283 'user': ('user(%(val)r)', ' or '),
286 284 }
287 285
288 286 opts = dict(opts)
289 287 # follow or not follow?
290 288 follow = opts.get('follow') or opts.get('follow_first')
291 289 followfirst = opts.get('follow_first') and 1 or 0
292 290 # --follow with FILE behaviour depends on revs...
293 291 startrev = revs[0]
294 292 followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
295 293
296 294 # branch and only_branch are really aliases and must be handled at
297 295 # the same time
298 296 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
299 297 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
300 298 # pats/include/exclude are passed to match.match() directly in
301 299 # _matchfile() revset but walkchangerevs() builds its matcher with
302 300 # scmutil.match(). The difference is input pats are globbed on
303 301 # platforms without shell expansion (windows).
304 302 pctx = repo[None]
305 303 match, pats = scmutil.matchandpats(pctx, pats, opts)
306 304 slowpath = match.anypats() or (match.files() and opts.get('removed'))
307 305 if not slowpath:
308 306 for f in match.files():
309 307 if follow and f not in pctx:
310 308 raise util.Abort(_('cannot follow file not in parent '
311 309 'revision: "%s"') % f)
312 310 filelog = repo.file(f)
313 311 if not len(filelog):
314 312 # A zero count may be a directory or deleted file, so
315 313 # try to find matching entries on the slow path.
316 314 if follow:
317 315 raise util.Abort(
318 316 _('cannot follow nonexistent file: "%s"') % f)
319 317 slowpath = True
320 318 if slowpath:
321 319 # See cmdutil.walkchangerevs() slow path.
322 320 #
323 321 if follow:
324 322 raise util.Abort(_('can only follow copies/renames for explicit '
325 323 'filenames'))
326 324 # pats/include/exclude cannot be represented as separate
327 325 # revset expressions as their filtering logic applies at file
328 326 # level. For instance "-I a -X b" matches a revision touching
329 327 # "a" and "b" while "file(a) and not file(b)" does
330 328 # not. Besides, filesets are evaluated against the working
331 329 # directory.
332 330 matchargs = ['r:', 'd:relpath']
333 331 for p in pats:
334 332 matchargs.append('p:' + p)
335 333 for p in opts.get('include', []):
336 334 matchargs.append('i:' + p)
337 335 for p in opts.get('exclude', []):
338 336 matchargs.append('x:' + p)
339 337 matchargs = ','.join(('%r' % p) for p in matchargs)
340 338 opts['_matchfiles'] = matchargs
341 339 else:
342 340 if follow:
343 341 fpats = ('_patsfollow', '_patsfollowfirst')
344 342 fnopats = (('_ancestors', '_fancestors'),
345 343 ('_descendants', '_fdescendants'))
346 344 if pats:
347 345 # follow() revset interprets its file argument as a
348 346 # manifest entry, so use match.files(), not pats.
349 347 opts[fpats[followfirst]] = list(match.files())
350 348 else:
351 349 opts[fnopats[followdescendants][followfirst]] = str(startrev)
352 350 else:
353 351 opts['_patslog'] = list(pats)
354 352
355 353 filematcher = None
356 354 if opts.get('patch') or opts.get('stat'):
357 355 if follow:
358 356 filematcher = _makefilematcher(repo, pats, followfirst)
359 357 else:
360 358 filematcher = lambda rev: match
361 359
362 360 expr = []
363 361 for op, val in opts.iteritems():
364 362 if not val:
365 363 continue
366 364 if op not in opt2revset:
367 365 continue
368 366 revop, andor = opt2revset[op]
369 367 if '%(val)' not in revop:
370 368 expr.append(revop)
371 369 else:
372 370 if not isinstance(val, list):
373 371 e = revop % {'val': val}
374 372 else:
375 373 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
376 374 expr.append(e)
377 375
378 376 if expr:
379 377 expr = '(' + ' and '.join(expr) + ')'
380 378 else:
381 379 expr = None
382 380 return expr, filematcher
383 381
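
As a standalone illustration of how the loop above turns options into a revset expression (not part of this changeset), the sketch below replays it on a hand-picked subset of opt2revset with invented option values; sorted() is used only to make the output order deterministic.

# Replaying the expression-building loop at the end of _makelogrevset().
opt2revset = {
    'keyword': ('keyword(%(val)r)', ' or '),
    'user': ('user(%(val)r)', ' or '),
    'no_merges': ('not merge()', None),
}
opts = {'keyword': ['bug'], 'user': ['alice', 'bob'], 'no_merges': True}

expr = []
for op, val in sorted(opts.items()):
    if not val:
        continue
    revop, andor = opt2revset[op]
    if '%(val)' not in revop:
        expr.append(revop)
    elif not isinstance(val, list):
        expr.append(revop % {'val': val})
    else:
        expr.append('(' + andor.join(revop % {'val': v} for v in val) + ')')
print('(' + ' and '.join(expr) + ')')
# ((keyword('bug')) and not merge() and (user('alice') or user('bob')))
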
384 382 def getlogrevs(repo, pats, opts):
385 383 """Return (revs, expr, filematcher) where revs is an iterable of
386 384 revision numbers, expr is a revset string built from log options
387 385 and file patterns or None, and used to filter 'revs'. If --stat or
388 386 --patch are not passed filematcher is None. Otherwise it is a
389 387 callable taking a revision number and returning a match object
390 388 filtering the files to be detailed when displaying the revision.
391 389 """
392 390 def increasingrevs(repo, revs, matcher):
393 391 # The sorted input rev sequence is chopped into sub-sequences
394 392 # which are sorted in ascending order and passed to the
395 393 # matcher. The filtered revs are sorted again as they were in
396 394 # the original sub-sequence. This achieves several things:
397 395 #
398 396 # - getlogrevs() now returns a generator whose behaviour is
399 397 # adapted to log needs. First results come fast, last ones
400 398 # are batched for performance.
401 399 #
402 400 # - revset matchers often operate faster on revisions in
403 401 # changelog order, because most filters deal with the
404 402 # changelog.
405 403 #
406 404 # - revset matchers can reorder revisions. "A or B" typically
407 405 # returns the revision matching A then the revision
408 406 # matching B. We want to hide this internal implementation
409 407 # detail from the caller, and sorting the filtered revisions
410 408 # again achieves this.
411 409 for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
412 410 orevs = revs[i:i + window]
413 411 nrevs = set(matcher(repo, sorted(orevs)))
414 412 for rev in orevs:
415 413 if rev in nrevs:
416 414 yield rev
417 415
418 416 if not len(repo):
419 417 return iter([]), None, None
420 418 # Default --rev value depends on --follow but --follow behaviour
421 419 # depends on revisions resolved from --rev...
422 420 follow = opts.get('follow') or opts.get('follow_first')
423 421 if opts.get('rev'):
424 422 revs = scmutil.revrange(repo, opts['rev'])
425 423 else:
426 424 if follow and len(repo) > 0:
427 425 revs = scmutil.revrange(repo, ['.:0'])
428 426 else:
429 427 revs = range(len(repo) - 1, -1, -1)
430 428 if not revs:
431 429 return iter([]), None, None
432 430 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
433 431 if expr:
434 432 matcher = revset.match(repo.ui, expr)
435 433 revs = increasingrevs(repo, revs, matcher)
436 434 if not opts.get('hidden'):
437 435 # --hidden is still experimental and not worth a dedicated revset
438 436 # yet. Fortunately, filtering revision number is fast.
439 437 revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
440 438 else:
441 439 revs = iter(revs)
442 440 return revs, expr, filematcher
443 441
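
To show the windowed filtering idea from increasingrevs() without a repository (not part of this changeset), the sketch below uses a stand-in matcher and a fixed window size; the real cmdutil.increasingwindows() grows its windows as it goes, so this only mirrors the sort-then-restore-order behaviour described in the comments above.

# Stand-in for increasingrevs(): chop the descending rev list into windows,
# hand each window to the "matcher" in ascending order, then re-emit the
# matches in the original (descending) order.
def fakematcher(repo, revs):
    return [r for r in revs if r % 2 == 0]    # pretend revset: even revs only

def increasing(revs, matcher, windowsize=3):
    for i in range(0, len(revs), windowsize):
        window = revs[i:i + windowsize]
        matched = set(matcher(None, sorted(window)))
        for rev in window:
            if rev in matched:
                yield rev

print(list(increasing(range(9, -1, -1), fakematcher)))   # [8, 6, 4, 2, 0]
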
444 442 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
445 443 filematcher=None):
446 444 seen, state = [], asciistate()
447 445 for rev, type, ctx, parents in dag:
448 446 char = 'o'
449 447 if ctx.node() in showparents:
450 448 char = '@'
451 449 elif ctx.obsolete():
452 450 char = 'x'
453 451 copies = None
454 452 if getrenamed and ctx.rev():
455 453 copies = []
456 454 for fn in ctx.files():
457 455 rename = getrenamed(fn, ctx.rev())
458 456 if rename:
459 457 copies.append((fn, rename[0]))
460 458 revmatchfn = None
461 459 if filematcher is not None:
462 460 revmatchfn = filematcher(ctx.rev())
463 461 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
464 462 lines = displayer.hunk.pop(rev).split('\n')
465 463 if not lines[-1]:
466 464 del lines[-1]
467 465 displayer.flush(rev)
468 466 edges = edgefn(type, char, lines, seen, rev, parents)
469 467 for type, char, lines, coldata in edges:
470 468 ascii(ui, state, type, char, lines, coldata)
471 469 displayer.close()
472 470
473 471 @command('glog',
474 472 [('f', 'follow', None,
475 473 _('follow changeset history, or file history across copies and renames')),
476 474 ('', 'follow-first', None,
477 475 _('only follow the first parent of merge changesets (DEPRECATED)')),
478 476 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
479 477 ('C', 'copies', None, _('show copied files')),
480 478 ('k', 'keyword', [],
481 479 _('do case-insensitive search for a given text'), _('TEXT')),
482 480 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
483 481 ('', 'removed', None, _('include revisions where files were removed')),
484 482 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
485 483 ('u', 'user', [], _('revisions committed by user'), _('USER')),
486 484 ('', 'only-branch', [],
487 485 _('show only changesets within the given named branch (DEPRECATED)'),
488 486 _('BRANCH')),
489 487 ('b', 'branch', [],
490 488 _('show changesets within the given named branch'), _('BRANCH')),
491 489 ('P', 'prune', [],
492 490 _('do not display revision or any of its ancestors'), _('REV')),
493 491 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
494 492 ] + commands.logopts + commands.walkopts,
495 493 _('[OPTION]... [FILE]'))
496 494 def graphlog(ui, repo, *pats, **opts):
497 495 """show revision history alongside an ASCII revision graph
498 496
499 497 Print a revision history alongside a revision graph drawn with
500 498 ASCII characters.
501 499
502 500 Nodes printed as an @ character are parents of the working
503 501 directory.
504 502 """
505 503
506 504 revs, expr, filematcher = getlogrevs(repo, pats, opts)
507 505 revs = sorted(revs, reverse=1)
508 506 limit = cmdutil.loglimit(opts)
509 507 if limit is not None:
510 508 revs = revs[:limit]
511 509 revdag = graphmod.dagwalker(repo, revs)
512 510
513 511 getrenamed = None
514 512 if opts.get('copies'):
515 513 endrev = None
516 514 if opts.get('rev'):
517 515 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
518 516 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
519 517 displayer = show_changeset(ui, repo, opts, buffered=True)
520 518 showparents = [ctx.node() for ctx in repo[None].parents()]
521 519 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
522 520 filematcher)
523 521
524 522 def graphrevs(repo, nodes, opts):
525 523 limit = cmdutil.loglimit(opts)
526 524 nodes.reverse()
527 525 if limit is not None:
528 526 nodes = nodes[:limit]
529 527 return graphmod.nodes(repo, nodes)
530 528
531 529 def goutgoing(ui, repo, dest=None, **opts):
532 530 """show the outgoing changesets alongside an ASCII revision graph
533 531
534 532 Print the outgoing changesets alongside a revision graph drawn with
535 533 ASCII characters.
536 534
537 535 Nodes printed as an @ character are parents of the working
538 536 directory.
539 537 """
540 538
541 539 _checkunsupportedflags([], opts)
542 540 o = hg._outgoing(ui, repo, dest, opts)
543 541 if o is None:
544 542 return
545 543
546 544 revdag = graphrevs(repo, o, opts)
547 545 displayer = show_changeset(ui, repo, opts, buffered=True)
548 546 showparents = [ctx.node() for ctx in repo[None].parents()]
549 547 generate(ui, revdag, displayer, showparents, asciiedges)
550 548
551 549 def gincoming(ui, repo, source="default", **opts):
552 550 """show the incoming changesets alongside an ASCII revision graph
553 551
554 552 Print the incoming changesets alongside a revision graph drawn with
555 553 ASCII characters.
556 554
557 555 Nodes printed as an @ character are parents of the working
558 556 directory.
559 557 """
560 558 def subreporecurse():
561 559 return 1
562 560
563 561 _checkunsupportedflags([], opts)
564 562 def display(other, chlist, displayer):
565 563 revdag = graphrevs(other, chlist, opts)
566 564 showparents = [ctx.node() for ctx in repo[None].parents()]
567 565 generate(ui, revdag, displayer, showparents, asciiedges)
568 566
569 567 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
570 568
571 569 def uisetup(ui):
572 570 '''Initialize the extension.'''
573 571 _wrapcmd('log', commands.table, graphlog)
574 572 _wrapcmd('incoming', commands.table, gincoming)
575 573 _wrapcmd('outgoing', commands.table, goutgoing)
576 574
577 575 def _wrapcmd(cmd, table, wrapfn):
578 576 '''wrap the command'''
579 577 def graph(orig, *args, **kwargs):
580 578 if kwargs['graph']:
581 579 return wrapfn(*args, **kwargs)
582 580 return orig(*args, **kwargs)
583 581 entry = extensions.wrapcommand(table, cmd, graph)
584 582 entry[1].append(('G', 'graph', None, _("show the revision DAG")))