graphlog: remove unused get_revs() function
Patrick Mezard
r17162:868c256c default
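The helper being deleted appears in full at old lines 230-237 of the diff below; it turned the raw --rev option into a (max, min) revision pair, falling back to the whole repository, and it was the only user of the nullrev import dropped at old line 17. Nothing in graphlog.py calls it any more, because the glog command resolves its revisions through getlogrevs() instead. For reference, a minimal standalone sketch of what the removed helper computed, assuming a Mercurial of this vintage (Python 2) with repo a local repository object and rev_opt the raw --rev list:

from mercurial import scmutil
from mercurial.node import nullrev

def get_revs(repo, rev_opt):
    # (max, min) bounds for --rev, or the whole repository when no
    # revision was requested; (nullrev, nullrev) for an empty range.
    if rev_opt:
        revs = scmutil.revrange(repo, rev_opt)
        if not revs:
            return (nullrev, nullrev)
        return (max(revs), min(revs))
    return (len(repo) - 1, 0)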
@@ -1,594 +1,584 @@
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 from mercurial.cmdutil import show_changeset
16 16 from mercurial.i18n import _
17 from mercurial.node import nullrev
18 17 from mercurial import cmdutil, commands, extensions, scmutil
19 18 from mercurial import hg, util, graphmod, templatekw, revset
20 19
21 20 cmdtable = {}
22 21 command = cmdutil.command(cmdtable)
23 22 testedwith = 'internal'
24 23
25 24 ASCIIDATA = 'ASC'
26 25
27 26 def asciiedges(type, char, lines, seen, rev, parents):
28 27 """adds edge info to changelog DAG walk suitable for ascii()"""
29 28 if rev not in seen:
30 29 seen.append(rev)
31 30 nodeidx = seen.index(rev)
32 31
33 32 knownparents = []
34 33 newparents = []
35 34 for parent in parents:
36 35 if parent in seen:
37 36 knownparents.append(parent)
38 37 else:
39 38 newparents.append(parent)
40 39
41 40 ncols = len(seen)
42 41 nextseen = seen[:]
43 42 nextseen[nodeidx:nodeidx + 1] = newparents
44 43 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
45 44
46 45 while len(newparents) > 2:
47 46 # ascii() only knows how to add or remove a single column between two
48 47 # calls. Nodes with more than two parents break this constraint so we
49 48 # introduce intermediate expansion lines to grow the active node list
50 49 # slowly.
51 50 edges.append((nodeidx, nodeidx))
52 51 edges.append((nodeidx, nodeidx + 1))
53 52 nmorecols = 1
54 53 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
55 54 char = '\\'
56 55 lines = []
57 56 nodeidx += 1
58 57 ncols += 1
59 58 edges = []
60 59 del newparents[0]
61 60
62 61 if len(newparents) > 0:
63 62 edges.append((nodeidx, nodeidx))
64 63 if len(newparents) > 1:
65 64 edges.append((nodeidx, nodeidx + 1))
66 65 nmorecols = len(nextseen) - ncols
67 66 seen[:] = nextseen
68 67 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
69 68
70 69 def fix_long_right_edges(edges):
71 70 for (i, (start, end)) in enumerate(edges):
72 71 if end > start:
73 72 edges[i] = (start, end + 1)
74 73
75 74 def get_nodeline_edges_tail(
76 75 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
77 76 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
78 77 # Still going in the same non-vertical direction.
79 78 if n_columns_diff == -1:
80 79 start = max(node_index + 1, p_node_index)
81 80 tail = ["|", " "] * (start - node_index - 1)
82 81 tail.extend(["/", " "] * (n_columns - start))
83 82 return tail
84 83 else:
85 84 return ["\\", " "] * (n_columns - node_index - 1)
86 85 else:
87 86 return ["|", " "] * (n_columns - node_index - 1)
88 87
89 88 def draw_edges(edges, nodeline, interline):
90 89 for (start, end) in edges:
91 90 if start == end + 1:
92 91 interline[2 * end + 1] = "/"
93 92 elif start == end - 1:
94 93 interline[2 * start + 1] = "\\"
95 94 elif start == end:
96 95 interline[2 * start] = "|"
97 96 else:
98 97 if 2 * end >= len(nodeline):
99 98 continue
100 99 nodeline[2 * end] = "+"
101 100 if start > end:
102 101 (start, end) = (end, start)
103 102 for i in range(2 * start + 1, 2 * end):
104 103 if nodeline[i] != "+":
105 104 nodeline[i] = "-"
106 105
107 106 def get_padding_line(ni, n_columns, edges):
108 107 line = []
109 108 line.extend(["|", " "] * ni)
110 109 if (ni, ni - 1) in edges or (ni, ni) in edges:
111 110 # (ni, ni - 1) (ni, ni)
112 111 # | | | | | | | |
113 112 # +---o | | o---+
114 113 # | | c | | c | |
115 114 # | |/ / | |/ /
116 115 # | | | | | |
117 116 c = "|"
118 117 else:
119 118 c = " "
120 119 line.extend([c, " "])
121 120 line.extend(["|", " "] * (n_columns - ni - 1))
122 121 return line
123 122
124 123 def asciistate():
125 124 """returns the initial value for the "state" argument to ascii()"""
126 125 return [0, 0]
127 126
128 127 def ascii(ui, state, type, char, text, coldata):
129 128 """prints an ASCII graph of the DAG
130 129
131 130 takes the following arguments (one call per node in the graph):
132 131
133 132 - ui to write to
134 133 - Somewhere to keep the needed state in (init to asciistate())
135 134 - Column of the current node in the set of ongoing edges.
136 135 - Type indicator of node data == ASCIIDATA.
137 136 - Payload: (char, lines):
138 137 - Character to use as node's symbol.
139 138 - List of lines to display as the node's text.
140 139 - Edges; a list of (col, next_col) indicating the edges between
141 140 the current node and its parents.
142 141 - Number of columns (ongoing edges) in the current revision.
143 142 - The difference between the number of columns (ongoing edges)
144 143 in the next revision and the number of columns (ongoing edges)
145 144 in the current revision. That is: -1 means one column removed;
146 145 0 means no columns added or removed; 1 means one column added.
147 146 """
148 147
149 148 idx, edges, ncols, coldiff = coldata
150 149 assert -2 < coldiff < 2
151 150 if coldiff == -1:
152 151 # Transform
153 152 #
154 153 # | | | | | |
155 154 # o | | into o---+
156 155 # |X / |/ /
157 156 # | | | |
158 157 fix_long_right_edges(edges)
159 158
160 159 # add_padding_line says whether to rewrite
161 160 #
162 161 # | | | | | | | |
163 162 # | o---+ into | o---+
164 163 # | / / | | | # <--- padding line
165 164 # o | | | / /
166 165 # o | |
167 166 add_padding_line = (len(text) > 2 and coldiff == -1 and
168 167 [x for (x, y) in edges if x + 1 < y])
169 168
170 169 # fix_nodeline_tail says whether to rewrite
171 170 #
172 171 # | | o | | | | o | |
173 172 # | | |/ / | | |/ /
174 173 # | o | | into | o / / # <--- fixed nodeline tail
175 174 # | |/ / | |/ /
176 175 # o | | o | |
177 176 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
178 177
179 178 # nodeline is the line containing the node character (typically o)
180 179 nodeline = ["|", " "] * idx
181 180 nodeline.extend([char, " "])
182 181
183 182 nodeline.extend(
184 183 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
185 184 state[0], fix_nodeline_tail))
186 185
187 186 # shift_interline is the line containing the non-vertical
188 187 # edges between this entry and the next
189 188 shift_interline = ["|", " "] * idx
190 189 if coldiff == -1:
191 190 n_spaces = 1
192 191 edge_ch = "/"
193 192 elif coldiff == 0:
194 193 n_spaces = 2
195 194 edge_ch = "|"
196 195 else:
197 196 n_spaces = 3
198 197 edge_ch = "\\"
199 198 shift_interline.extend(n_spaces * [" "])
200 199 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
201 200
202 201 # draw edges from the current node to its parents
203 202 draw_edges(edges, nodeline, shift_interline)
204 203
205 204 # lines is the list of all graph lines to print
206 205 lines = [nodeline]
207 206 if add_padding_line:
208 207 lines.append(get_padding_line(idx, ncols, edges))
209 208 lines.append(shift_interline)
210 209
211 210 # make sure that there are as many graph lines as there are
212 211 # log strings
213 212 while len(text) < len(lines):
214 213 text.append("")
215 214 if len(lines) < len(text):
216 215 extra_interline = ["|", " "] * (ncols + coldiff)
217 216 while len(lines) < len(text):
218 217 lines.append(extra_interline)
219 218
220 219 # print lines
221 220 indentation_level = max(ncols, ncols + coldiff)
222 221 for (line, logstr) in zip(lines, text):
223 222 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
224 223 ui.write(ln.rstrip() + '\n')
225 224
226 225 # ... and start over
227 226 state[0] = coldiff
228 227 state[1] = idx
229 228
230 def get_revs(repo, rev_opt):
231 if rev_opt:
232 revs = scmutil.revrange(repo, rev_opt)
233 if len(revs) == 0:
234 return (nullrev, nullrev)
235 return (max(revs), min(revs))
236 else:
237 return (len(repo) - 1, 0)
238
239 229 def check_unsupported_flags(pats, opts):
240 230 for op in ["newest_first"]:
241 231 if op in opts and opts[op]:
242 232 raise util.Abort(_("-G/--graph option is incompatible with --%s")
243 233 % op.replace("_", "-"))
244 234
245 235 def _makefilematcher(repo, pats, followfirst):
246 236 # When displaying a revision with --patch --follow FILE, we have
247 237 # to know which file of the revision must be diffed. With
248 238 # --follow, we want the names of the ancestors of FILE in the
249 239 # revision, stored in "fcache". "fcache" is populated by
250 240 # reproducing the graph traversal already done by --follow revset
251 241 # and relating linkrevs to file names (which is not "correct" but
252 242 # good enough).
253 243 fcache = {}
254 244 fcacheready = [False]
255 245 pctx = repo['.']
256 246 wctx = repo[None]
257 247
258 248 def populate():
259 249 for fn in pats:
260 250 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
261 251 for c in i:
262 252 fcache.setdefault(c.linkrev(), set()).add(c.path())
263 253
264 254 def filematcher(rev):
265 255 if not fcacheready[0]:
266 256 # Lazy initialization
267 257 fcacheready[0] = True
268 258 populate()
269 259 return scmutil.match(wctx, fcache.get(rev, []), default='path')
270 260
271 261 return filematcher
272 262
273 263 def _makelogrevset(repo, pats, opts, revs):
274 264 """Return (expr, filematcher) where expr is a revset string built
275 265 from log options and file patterns or None. If --stat or --patch
276 266 are not passed filematcher is None. Otherwise it is a callable
277 267 taking a revision number and returning a match object filtering
278 268 the files to be detailed when displaying the revision.
279 269 """
280 270 opt2revset = {
281 271 'no_merges': ('not merge()', None),
282 272 'only_merges': ('merge()', None),
283 273 '_ancestors': ('ancestors(%(val)s)', None),
284 274 '_fancestors': ('_firstancestors(%(val)s)', None),
285 275 '_descendants': ('descendants(%(val)s)', None),
286 276 '_fdescendants': ('_firstdescendants(%(val)s)', None),
287 277 '_matchfiles': ('_matchfiles(%(val)s)', None),
288 278 'date': ('date(%(val)r)', None),
289 279 'branch': ('branch(%(val)r)', ' or '),
290 280 '_patslog': ('filelog(%(val)r)', ' or '),
291 281 '_patsfollow': ('follow(%(val)r)', ' or '),
292 282 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
293 283 'keyword': ('keyword(%(val)r)', ' or '),
294 284 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
295 285 'user': ('user(%(val)r)', ' or '),
296 286 }
297 287
298 288 opts = dict(opts)
299 289 # follow or not follow?
300 290 follow = opts.get('follow') or opts.get('follow_first')
301 291 followfirst = opts.get('follow_first') and 1 or 0
302 292 # --follow with FILE behaviour depends on revs...
303 293 startrev = revs[0]
304 294 followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
305 295
306 296 # branch and only_branch are really aliases and must be handled at
307 297 # the same time
308 298 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
309 299 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
310 300 # pats/include/exclude are passed to match.match() directly in
311 301 # _matchfile() revset but walkchangerevs() builds its matcher with
312 302 # scmutil.match(). The difference is input pats are globbed on
313 303 # platforms without shell expansion (windows).
314 304 pctx = repo[None]
315 305 match, pats = scmutil.matchandpats(pctx, pats, opts)
316 306 slowpath = match.anypats() or (match.files() and opts.get('removed'))
317 307 if not slowpath:
318 308 for f in match.files():
319 309 if follow and f not in pctx:
320 310 raise util.Abort(_('cannot follow file not in parent '
321 311 'revision: "%s"') % f)
322 312 filelog = repo.file(f)
323 313 if not len(filelog):
324 314 # A zero count may be a directory or deleted file, so
325 315 # try to find matching entries on the slow path.
326 316 if follow:
327 317 raise util.Abort(
328 318 _('cannot follow nonexistent file: "%s"') % f)
329 319 slowpath = True
330 320 if slowpath:
331 321 # See cmdutil.walkchangerevs() slow path.
332 322 #
333 323 if follow:
334 324 raise util.Abort(_('can only follow copies/renames for explicit '
335 325 'filenames'))
336 326 # pats/include/exclude cannot be represented as separate
337 327 # revset expressions as their filtering logic applies at file
338 328 # level. For instance "-I a -X b" matches a revision touching
339 329 # "a" and "b" while "file(a) and not file(b)" does
340 330 # not. Besides, filesets are evaluated against the working
341 331 # directory.
342 332 matchargs = ['r:', 'd:relpath']
343 333 for p in pats:
344 334 matchargs.append('p:' + p)
345 335 for p in opts.get('include', []):
346 336 matchargs.append('i:' + p)
347 337 for p in opts.get('exclude', []):
348 338 matchargs.append('x:' + p)
349 339 matchargs = ','.join(('%r' % p) for p in matchargs)
350 340 opts['_matchfiles'] = matchargs
351 341 else:
352 342 if follow:
353 343 fpats = ('_patsfollow', '_patsfollowfirst')
354 344 fnopats = (('_ancestors', '_fancestors'),
355 345 ('_descendants', '_fdescendants'))
356 346 if pats:
357 347 # follow() revset interprets its file argument as a
358 348 # manifest entry, so use match.files(), not pats.
359 349 opts[fpats[followfirst]] = list(match.files())
360 350 else:
361 351 opts[fnopats[followdescendants][followfirst]] = str(startrev)
362 352 else:
363 353 opts['_patslog'] = list(pats)
364 354
365 355 filematcher = None
366 356 if opts.get('patch') or opts.get('stat'):
367 357 if follow:
368 358 filematcher = _makefilematcher(repo, pats, followfirst)
369 359 else:
370 360 filematcher = lambda rev: match
371 361
372 362 expr = []
373 363 for op, val in opts.iteritems():
374 364 if not val:
375 365 continue
376 366 if op not in opt2revset:
377 367 continue
378 368 revop, andor = opt2revset[op]
379 369 if '%(val)' not in revop:
380 370 expr.append(revop)
381 371 else:
382 372 if not isinstance(val, list):
383 373 e = revop % {'val': val}
384 374 else:
385 375 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
386 376 expr.append(e)
387 377
388 378 if expr:
389 379 expr = '(' + ' and '.join(expr) + ')'
390 380 else:
391 381 expr = None
392 382 return expr, filematcher
393 383
394 384 def getlogrevs(repo, pats, opts):
395 385 """Return (revs, expr, filematcher) where revs is an iterable of
396 386 revision numbers, expr is a revset string built from log options
397 387 and file patterns or None, and used to filter 'revs'. If --stat or
398 388 --patch are not passed filematcher is None. Otherwise it is a
399 389 callable taking a revision number and returning a match object
400 390 filtering the files to be detailed when displaying the revision.
401 391 """
402 392 def increasingrevs(repo, revs, matcher):
403 393 # The sorted input rev sequence is chopped in sub-sequences
404 394 # which are sorted in ascending order and passed to the
405 395 # matcher. The filtered revs are sorted again as they were in
406 396 # the original sub-sequence. This achieves several things:
407 397 #
408 398 # - getlogrevs() now returns a generator whose behaviour is
409 399 # adapted to log needs. First results come fast, later ones
410 400 # are batched for performance.
411 401 #
412 402 # - revset matchers often operate faster on revisions in
413 403 # changelog order, because most filters deal with the
414 404 # changelog.
415 405 #
416 406 # - revset matchers can reorder revisions. "A or B" typically
417 407 # returns the revision matching A then the revision
418 408 # matching B. We want to hide this internal implementation
419 409 # detail from the caller, and sorting the filtered revision
420 410 # again achieves this.
421 411 for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
422 412 orevs = revs[i:i + window]
423 413 nrevs = set(matcher(repo, sorted(orevs)))
424 414 for rev in orevs:
425 415 if rev in nrevs:
426 416 yield rev
427 417
428 418 if not len(repo):
429 419 return iter([]), None, None
430 420 # Default --rev value depends on --follow but --follow behaviour
431 421 # depends on revisions resolved from --rev...
432 422 follow = opts.get('follow') or opts.get('follow_first')
433 423 if opts.get('rev'):
434 424 revs = scmutil.revrange(repo, opts['rev'])
435 425 else:
436 426 if follow and len(repo) > 0:
437 427 revs = scmutil.revrange(repo, ['.:0'])
438 428 else:
439 429 revs = range(len(repo) - 1, -1, -1)
440 430 if not revs:
441 431 return iter([]), None, None
442 432 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
443 433 if expr:
444 434 matcher = revset.match(repo.ui, expr)
445 435 revs = increasingrevs(repo, revs, matcher)
446 436 if not opts.get('hidden'):
447 437 # --hidden is still experimental and not worth a dedicated revset
448 438 # yet. Fortunately, filtering revision number is fast.
449 439 revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
450 440 else:
451 441 revs = iter(revs)
452 442 return revs, expr, filematcher
453 443
454 444 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
455 445 filematcher=None):
456 446 seen, state = [], asciistate()
457 447 for rev, type, ctx, parents in dag:
458 448 char = 'o'
459 449 if ctx.node() in showparents:
460 450 char = '@'
461 451 elif ctx.obsolete():
462 452 char = 'x'
463 453 copies = None
464 454 if getrenamed and ctx.rev():
465 455 copies = []
466 456 for fn in ctx.files():
467 457 rename = getrenamed(fn, ctx.rev())
468 458 if rename:
469 459 copies.append((fn, rename[0]))
470 460 revmatchfn = None
471 461 if filematcher is not None:
472 462 revmatchfn = filematcher(ctx.rev())
473 463 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
474 464 lines = displayer.hunk.pop(rev).split('\n')
475 465 if not lines[-1]:
476 466 del lines[-1]
477 467 displayer.flush(rev)
478 468 edges = edgefn(type, char, lines, seen, rev, parents)
479 469 for type, char, lines, coldata in edges:
480 470 ascii(ui, state, type, char, lines, coldata)
481 471 displayer.close()
482 472
483 473 @command('glog',
484 474 [('f', 'follow', None,
485 475 _('follow changeset history, or file history across copies and renames')),
486 476 ('', 'follow-first', None,
487 477 _('only follow the first parent of merge changesets (DEPRECATED)')),
488 478 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
489 479 ('C', 'copies', None, _('show copied files')),
490 480 ('k', 'keyword', [],
491 481 _('do case-insensitive search for a given text'), _('TEXT')),
492 482 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
493 483 ('', 'removed', None, _('include revisions where files were removed')),
494 484 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
495 485 ('u', 'user', [], _('revisions committed by user'), _('USER')),
496 486 ('', 'only-branch', [],
497 487 _('show only changesets within the given named branch (DEPRECATED)'),
498 488 _('BRANCH')),
499 489 ('b', 'branch', [],
500 490 _('show changesets within the given named branch'), _('BRANCH')),
501 491 ('P', 'prune', [],
502 492 _('do not display revision or any of its ancestors'), _('REV')),
503 493 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
504 494 ] + commands.logopts + commands.walkopts,
505 495 _('[OPTION]... [FILE]'))
506 496 def graphlog(ui, repo, *pats, **opts):
507 497 """show revision history alongside an ASCII revision graph
508 498
509 499 Print a revision history alongside a revision graph drawn with
510 500 ASCII characters.
511 501
512 502 Nodes printed as an @ character are parents of the working
513 503 directory.
514 504 """
515 505
516 506 revs, expr, filematcher = getlogrevs(repo, pats, opts)
517 507 revs = sorted(revs, reverse=1)
518 508 limit = cmdutil.loglimit(opts)
519 509 if limit is not None:
520 510 revs = revs[:limit]
521 511 revdag = graphmod.dagwalker(repo, revs)
522 512
523 513 getrenamed = None
524 514 if opts.get('copies'):
525 515 endrev = None
526 516 if opts.get('rev'):
527 517 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
528 518 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
529 519 displayer = show_changeset(ui, repo, opts, buffered=True)
530 520 showparents = [ctx.node() for ctx in repo[None].parents()]
531 521 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
532 522 filematcher)
533 523
534 524 def graphrevs(repo, nodes, opts):
535 525 limit = cmdutil.loglimit(opts)
536 526 nodes.reverse()
537 527 if limit is not None:
538 528 nodes = nodes[:limit]
539 529 return graphmod.nodes(repo, nodes)
540 530
541 531 def goutgoing(ui, repo, dest=None, **opts):
542 532 """show the outgoing changesets alongside an ASCII revision graph
543 533
544 534 Print the outgoing changesets alongside a revision graph drawn with
545 535 ASCII characters.
546 536
547 537 Nodes printed as an @ character are parents of the working
548 538 directory.
549 539 """
550 540
551 541 check_unsupported_flags([], opts)
552 542 o = hg._outgoing(ui, repo, dest, opts)
553 543 if o is None:
554 544 return
555 545
556 546 revdag = graphrevs(repo, o, opts)
557 547 displayer = show_changeset(ui, repo, opts, buffered=True)
558 548 showparents = [ctx.node() for ctx in repo[None].parents()]
559 549 generate(ui, revdag, displayer, showparents, asciiedges)
560 550
561 551 def gincoming(ui, repo, source="default", **opts):
562 552 """show the incoming changesets alongside an ASCII revision graph
563 553
564 554 Print the incoming changesets alongside a revision graph drawn with
565 555 ASCII characters.
566 556
567 557 Nodes printed as an @ character are parents of the working
568 558 directory.
569 559 """
570 560 def subreporecurse():
571 561 return 1
572 562
573 563 check_unsupported_flags([], opts)
574 564 def display(other, chlist, displayer):
575 565 revdag = graphrevs(other, chlist, opts)
576 566 showparents = [ctx.node() for ctx in repo[None].parents()]
577 567 generate(ui, revdag, displayer, showparents, asciiedges)
578 568
579 569 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
580 570
581 571 def uisetup(ui):
582 572 '''Initialize the extension.'''
583 573 _wrapcmd('log', commands.table, graphlog)
584 574 _wrapcmd('incoming', commands.table, gincoming)
585 575 _wrapcmd('outgoing', commands.table, goutgoing)
586 576
587 577 def _wrapcmd(cmd, table, wrapfn):
588 578 '''wrap the command'''
589 579 def graph(orig, *args, **kwargs):
590 580 if kwargs['graph']:
591 581 return wrapfn(*args, **kwargs)
592 582 return orig(*args, **kwargs)
593 583 entry = extensions.wrapcommand(table, cmd, graph)
594 584 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
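For orientation, a rough usage sketch (not part of this changeset) driving asciiedges() and ascii() by hand with a tiny made-up linear DAG. It assumes a Python 2 Mercurial installation where this extension is importable as hgext.graphlog; the revision numbers and log text are illustrative only:

from mercurial import ui as uimod
from hgext.graphlog import asciiedges, ascii, asciistate

u = uimod.ui()
seen, state = [], asciistate()
# (rev, parents, log lines), newest first, roughly what graphmod.dagwalker yields
dag = [(2, [1], ['rev 2']), (1, [0], ['rev 1']), (0, [], ['rev 0'])]
for rev, parents, lines in dag:
    for type, char, text, coldata in asciiedges('C', 'o', lines, seen, rev, parents):
        ascii(u, state, type, char, text, coldata)
# prints roughly:
# o  rev 2
# |
# o  rev 1
# |
# o  rev 0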