##// END OF EJS Templates
graphlog: extract ascii drawing code into graphmod
Patrick Mezard -
r17179:0849d725 default
parent child Browse files
Show More
@@ -1,582 +1,379 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell
8 '''command to view revision graphs from a shell
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this options is given, an ASCII representation of the
11 commands. When this options is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 from mercurial.cmdutil import show_changeset
15 from mercurial.cmdutil import show_changeset
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial import cmdutil, commands, extensions, scmutil
17 from mercurial import cmdutil, commands, extensions, scmutil
18 from mercurial import hg, util, graphmod, templatekw, revset
18 from mercurial import hg, util, graphmod, templatekw, revset
19
19
20 cmdtable = {}
20 cmdtable = {}
21 command = cmdutil.command(cmdtable)
21 command = cmdutil.command(cmdtable)
22 testedwith = 'internal'
22 testedwith = 'internal'
23
23
24 def asciiedges(type, char, lines, seen, rev, parents):
25 """adds edge info to changelog DAG walk suitable for ascii()"""
26 if rev not in seen:
27 seen.append(rev)
28 nodeidx = seen.index(rev)
29
30 knownparents = []
31 newparents = []
32 for parent in parents:
33 if parent in seen:
34 knownparents.append(parent)
35 else:
36 newparents.append(parent)
37
38 ncols = len(seen)
39 nextseen = seen[:]
40 nextseen[nodeidx:nodeidx + 1] = newparents
41 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
42
43 while len(newparents) > 2:
44 # ascii() only knows how to add or remove a single column between two
45 # calls. Nodes with more than two parents break this constraint so we
46 # introduce intermediate expansion lines to grow the active node list
47 # slowly.
48 edges.append((nodeidx, nodeidx))
49 edges.append((nodeidx, nodeidx + 1))
50 nmorecols = 1
51 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
52 char = '\\'
53 lines = []
54 nodeidx += 1
55 ncols += 1
56 edges = []
57 del newparents[0]
58
59 if len(newparents) > 0:
60 edges.append((nodeidx, nodeidx))
61 if len(newparents) > 1:
62 edges.append((nodeidx, nodeidx + 1))
63 nmorecols = len(nextseen) - ncols
64 seen[:] = nextseen
65 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
66
67 def _fixlongrightedges(edges):
68 for (i, (start, end)) in enumerate(edges):
69 if end > start:
70 edges[i] = (start, end + 1)
71
72 def _getnodelineedgestail(
73 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
74 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
75 # Still going in the same non-vertical direction.
76 if n_columns_diff == -1:
77 start = max(node_index + 1, p_node_index)
78 tail = ["|", " "] * (start - node_index - 1)
79 tail.extend(["/", " "] * (n_columns - start))
80 return tail
81 else:
82 return ["\\", " "] * (n_columns - node_index - 1)
83 else:
84 return ["|", " "] * (n_columns - node_index - 1)
85
86 def _drawedges(edges, nodeline, interline):
87 for (start, end) in edges:
88 if start == end + 1:
89 interline[2 * end + 1] = "/"
90 elif start == end - 1:
91 interline[2 * start + 1] = "\\"
92 elif start == end:
93 interline[2 * start] = "|"
94 else:
95 if 2 * end >= len(nodeline):
96 continue
97 nodeline[2 * end] = "+"
98 if start > end:
99 (start, end) = (end, start)
100 for i in range(2 * start + 1, 2 * end):
101 if nodeline[i] != "+":
102 nodeline[i] = "-"
103
104 def _getpaddingline(ni, n_columns, edges):
105 line = []
106 line.extend(["|", " "] * ni)
107 if (ni, ni - 1) in edges or (ni, ni) in edges:
108 # (ni, ni - 1) (ni, ni)
109 # | | | | | | | |
110 # +---o | | o---+
111 # | | c | | c | |
112 # | |/ / | |/ /
113 # | | | | | |
114 c = "|"
115 else:
116 c = " "
117 line.extend([c, " "])
118 line.extend(["|", " "] * (n_columns - ni - 1))
119 return line
120
121 def asciistate():
122 """returns the initial value for the "state" argument to ascii()"""
123 return [0, 0]
124
125 def ascii(ui, state, type, char, text, coldata):
126 """prints an ASCII graph of the DAG
127
128 takes the following arguments (one call per node in the graph):
129
130 - ui to write to
131 - Somewhere to keep the needed state in (init to asciistate())
132 - Column of the current node in the set of ongoing edges.
133 - Type indicator of node data, usually 'C' for changesets.
134 - Payload: (char, lines):
135 - Character to use as node's symbol.
136 - List of lines to display as the node's text.
137 - Edges; a list of (col, next_col) indicating the edges between
138 the current node and its parents.
139 - Number of columns (ongoing edges) in the current revision.
140 - The difference between the number of columns (ongoing edges)
141 in the next revision and the number of columns (ongoing edges)
142 in the current revision. That is: -1 means one column removed;
143 0 means no columns added or removed; 1 means one column added.
144 """
145
146 idx, edges, ncols, coldiff = coldata
147 assert -2 < coldiff < 2
148 if coldiff == -1:
149 # Transform
150 #
151 # | | | | | |
152 # o | | into o---+
153 # |X / |/ /
154 # | | | |
155 _fixlongrightedges(edges)
156
157 # add_padding_line says whether to rewrite
158 #
159 # | | | | | | | |
160 # | o---+ into | o---+
161 # | / / | | | # <--- padding line
162 # o | | | / /
163 # o | |
164 add_padding_line = (len(text) > 2 and coldiff == -1 and
165 [x for (x, y) in edges if x + 1 < y])
166
167 # fix_nodeline_tail says whether to rewrite
168 #
169 # | | o | | | | o | |
170 # | | |/ / | | |/ /
171 # | o | | into | o / / # <--- fixed nodeline tail
172 # | |/ / | |/ /
173 # o | | o | |
174 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
175
176 # nodeline is the line containing the node character (typically o)
177 nodeline = ["|", " "] * idx
178 nodeline.extend([char, " "])
179
180 nodeline.extend(
181 _getnodelineedgestail(idx, state[1], ncols, coldiff,
182 state[0], fix_nodeline_tail))
183
184 # shift_interline is the line containing the non-vertical
185 # edges between this entry and the next
186 shift_interline = ["|", " "] * idx
187 if coldiff == -1:
188 n_spaces = 1
189 edge_ch = "/"
190 elif coldiff == 0:
191 n_spaces = 2
192 edge_ch = "|"
193 else:
194 n_spaces = 3
195 edge_ch = "\\"
196 shift_interline.extend(n_spaces * [" "])
197 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
198
199 # draw edges from the current node to its parents
200 _drawedges(edges, nodeline, shift_interline)
201
202 # lines is the list of all graph lines to print
203 lines = [nodeline]
204 if add_padding_line:
205 lines.append(_getpaddingline(idx, ncols, edges))
206 lines.append(shift_interline)
207
208 # make sure that there are as many graph lines as there are
209 # log strings
210 while len(text) < len(lines):
211 text.append("")
212 if len(lines) < len(text):
213 extra_interline = ["|", " "] * (ncols + coldiff)
214 while len(lines) < len(text):
215 lines.append(extra_interline)
216
217 # print lines
218 indentation_level = max(ncols, ncols + coldiff)
219 for (line, logstr) in zip(lines, text):
220 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
221 ui.write(ln.rstrip() + '\n')
222
223 # ... and start over
224 state[0] = coldiff
225 state[1] = idx
226
227 def _checkunsupportedflags(pats, opts):
24 def _checkunsupportedflags(pats, opts):
228 for op in ["newest_first"]:
25 for op in ["newest_first"]:
229 if op in opts and opts[op]:
26 if op in opts and opts[op]:
230 raise util.Abort(_("-G/--graph option is incompatible with --%s")
27 raise util.Abort(_("-G/--graph option is incompatible with --%s")
231 % op.replace("_", "-"))
28 % op.replace("_", "-"))
232
29
233 def _makefilematcher(repo, pats, followfirst):
30 def _makefilematcher(repo, pats, followfirst):
234 # When displaying a revision with --patch --follow FILE, we have
31 # When displaying a revision with --patch --follow FILE, we have
235 # to know which file of the revision must be diffed. With
32 # to know which file of the revision must be diffed. With
236 # --follow, we want the names of the ancestors of FILE in the
33 # --follow, we want the names of the ancestors of FILE in the
237 # revision, stored in "fcache". "fcache" is populated by
34 # revision, stored in "fcache". "fcache" is populated by
238 # reproducing the graph traversal already done by --follow revset
35 # reproducing the graph traversal already done by --follow revset
239 # and relating linkrevs to file names (which is not "correct" but
36 # and relating linkrevs to file names (which is not "correct" but
240 # good enough).
37 # good enough).
241 fcache = {}
38 fcache = {}
242 fcacheready = [False]
39 fcacheready = [False]
243 pctx = repo['.']
40 pctx = repo['.']
244 wctx = repo[None]
41 wctx = repo[None]
245
42
246 def populate():
43 def populate():
247 for fn in pats:
44 for fn in pats:
248 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
45 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
249 for c in i:
46 for c in i:
250 fcache.setdefault(c.linkrev(), set()).add(c.path())
47 fcache.setdefault(c.linkrev(), set()).add(c.path())
251
48
252 def filematcher(rev):
49 def filematcher(rev):
253 if not fcacheready[0]:
50 if not fcacheready[0]:
254 # Lazy initialization
51 # Lazy initialization
255 fcacheready[0] = True
52 fcacheready[0] = True
256 populate()
53 populate()
257 return scmutil.match(wctx, fcache.get(rev, []), default='path')
54 return scmutil.match(wctx, fcache.get(rev, []), default='path')
258
55
259 return filematcher
56 return filematcher
260
57
261 def _makelogrevset(repo, pats, opts, revs):
58 def _makelogrevset(repo, pats, opts, revs):
262 """Return (expr, filematcher) where expr is a revset string built
59 """Return (expr, filematcher) where expr is a revset string built
263 from log options and file patterns or None. If --stat or --patch
60 from log options and file patterns or None. If --stat or --patch
264 are not passed filematcher is None. Otherwise it is a callable
61 are not passed filematcher is None. Otherwise it is a callable
265 taking a revision number and returning a match objects filtering
62 taking a revision number and returning a match objects filtering
266 the files to be detailed when displaying the revision.
63 the files to be detailed when displaying the revision.
267 """
64 """
268 opt2revset = {
65 opt2revset = {
269 'no_merges': ('not merge()', None),
66 'no_merges': ('not merge()', None),
270 'only_merges': ('merge()', None),
67 'only_merges': ('merge()', None),
271 '_ancestors': ('ancestors(%(val)s)', None),
68 '_ancestors': ('ancestors(%(val)s)', None),
272 '_fancestors': ('_firstancestors(%(val)s)', None),
69 '_fancestors': ('_firstancestors(%(val)s)', None),
273 '_descendants': ('descendants(%(val)s)', None),
70 '_descendants': ('descendants(%(val)s)', None),
274 '_fdescendants': ('_firstdescendants(%(val)s)', None),
71 '_fdescendants': ('_firstdescendants(%(val)s)', None),
275 '_matchfiles': ('_matchfiles(%(val)s)', None),
72 '_matchfiles': ('_matchfiles(%(val)s)', None),
276 'date': ('date(%(val)r)', None),
73 'date': ('date(%(val)r)', None),
277 'branch': ('branch(%(val)r)', ' or '),
74 'branch': ('branch(%(val)r)', ' or '),
278 '_patslog': ('filelog(%(val)r)', ' or '),
75 '_patslog': ('filelog(%(val)r)', ' or '),
279 '_patsfollow': ('follow(%(val)r)', ' or '),
76 '_patsfollow': ('follow(%(val)r)', ' or '),
280 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
77 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
281 'keyword': ('keyword(%(val)r)', ' or '),
78 'keyword': ('keyword(%(val)r)', ' or '),
282 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
79 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
283 'user': ('user(%(val)r)', ' or '),
80 'user': ('user(%(val)r)', ' or '),
284 }
81 }
285
82
286 opts = dict(opts)
83 opts = dict(opts)
287 # follow or not follow?
84 # follow or not follow?
288 follow = opts.get('follow') or opts.get('follow_first')
85 follow = opts.get('follow') or opts.get('follow_first')
289 followfirst = opts.get('follow_first') and 1 or 0
86 followfirst = opts.get('follow_first') and 1 or 0
290 # --follow with FILE behaviour depends on revs...
87 # --follow with FILE behaviour depends on revs...
291 startrev = revs[0]
88 startrev = revs[0]
292 followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
89 followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
293
90
294 # branch and only_branch are really aliases and must be handled at
91 # branch and only_branch are really aliases and must be handled at
295 # the same time
92 # the same time
296 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
93 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
297 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
94 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
298 # pats/include/exclude are passed to match.match() directly in
95 # pats/include/exclude are passed to match.match() directly in
299 # _matchfile() revset but walkchangerevs() builds its matcher with
96 # _matchfile() revset but walkchangerevs() builds its matcher with
300 # scmutil.match(). The difference is input pats are globbed on
97 # scmutil.match(). The difference is input pats are globbed on
301 # platforms without shell expansion (windows).
98 # platforms without shell expansion (windows).
302 pctx = repo[None]
99 pctx = repo[None]
303 match, pats = scmutil.matchandpats(pctx, pats, opts)
100 match, pats = scmutil.matchandpats(pctx, pats, opts)
304 slowpath = match.anypats() or (match.files() and opts.get('removed'))
101 slowpath = match.anypats() or (match.files() and opts.get('removed'))
305 if not slowpath:
102 if not slowpath:
306 for f in match.files():
103 for f in match.files():
307 if follow and f not in pctx:
104 if follow and f not in pctx:
308 raise util.Abort(_('cannot follow file not in parent '
105 raise util.Abort(_('cannot follow file not in parent '
309 'revision: "%s"') % f)
106 'revision: "%s"') % f)
310 filelog = repo.file(f)
107 filelog = repo.file(f)
311 if not len(filelog):
108 if not len(filelog):
312 # A zero count may be a directory or deleted file, so
109 # A zero count may be a directory or deleted file, so
313 # try to find matching entries on the slow path.
110 # try to find matching entries on the slow path.
314 if follow:
111 if follow:
315 raise util.Abort(
112 raise util.Abort(
316 _('cannot follow nonexistent file: "%s"') % f)
113 _('cannot follow nonexistent file: "%s"') % f)
317 slowpath = True
114 slowpath = True
318 if slowpath:
115 if slowpath:
319 # See cmdutil.walkchangerevs() slow path.
116 # See cmdutil.walkchangerevs() slow path.
320 #
117 #
321 if follow:
118 if follow:
322 raise util.Abort(_('can only follow copies/renames for explicit '
119 raise util.Abort(_('can only follow copies/renames for explicit '
323 'filenames'))
120 'filenames'))
324 # pats/include/exclude cannot be represented as separate
121 # pats/include/exclude cannot be represented as separate
325 # revset expressions as their filtering logic applies at file
122 # revset expressions as their filtering logic applies at file
326 # level. For instance "-I a -X a" matches a revision touching
123 # level. For instance "-I a -X a" matches a revision touching
327 # "a" and "b" while "file(a) and not file(b)" does
124 # "a" and "b" while "file(a) and not file(b)" does
328 # not. Besides, filesets are evaluated against the working
125 # not. Besides, filesets are evaluated against the working
329 # directory.
126 # directory.
330 matchargs = ['r:', 'd:relpath']
127 matchargs = ['r:', 'd:relpath']
331 for p in pats:
128 for p in pats:
332 matchargs.append('p:' + p)
129 matchargs.append('p:' + p)
333 for p in opts.get('include', []):
130 for p in opts.get('include', []):
334 matchargs.append('i:' + p)
131 matchargs.append('i:' + p)
335 for p in opts.get('exclude', []):
132 for p in opts.get('exclude', []):
336 matchargs.append('x:' + p)
133 matchargs.append('x:' + p)
337 matchargs = ','.join(('%r' % p) for p in matchargs)
134 matchargs = ','.join(('%r' % p) for p in matchargs)
338 opts['_matchfiles'] = matchargs
135 opts['_matchfiles'] = matchargs
339 else:
136 else:
340 if follow:
137 if follow:
341 fpats = ('_patsfollow', '_patsfollowfirst')
138 fpats = ('_patsfollow', '_patsfollowfirst')
342 fnopats = (('_ancestors', '_fancestors'),
139 fnopats = (('_ancestors', '_fancestors'),
343 ('_descendants', '_fdescendants'))
140 ('_descendants', '_fdescendants'))
344 if pats:
141 if pats:
345 # follow() revset inteprets its file argument as a
142 # follow() revset inteprets its file argument as a
346 # manifest entry, so use match.files(), not pats.
143 # manifest entry, so use match.files(), not pats.
347 opts[fpats[followfirst]] = list(match.files())
144 opts[fpats[followfirst]] = list(match.files())
348 else:
145 else:
349 opts[fnopats[followdescendants][followfirst]] = str(startrev)
146 opts[fnopats[followdescendants][followfirst]] = str(startrev)
350 else:
147 else:
351 opts['_patslog'] = list(pats)
148 opts['_patslog'] = list(pats)
352
149
353 filematcher = None
150 filematcher = None
354 if opts.get('patch') or opts.get('stat'):
151 if opts.get('patch') or opts.get('stat'):
355 if follow:
152 if follow:
356 filematcher = _makefilematcher(repo, pats, followfirst)
153 filematcher = _makefilematcher(repo, pats, followfirst)
357 else:
154 else:
358 filematcher = lambda rev: match
155 filematcher = lambda rev: match
359
156
360 expr = []
157 expr = []
361 for op, val in opts.iteritems():
158 for op, val in opts.iteritems():
362 if not val:
159 if not val:
363 continue
160 continue
364 if op not in opt2revset:
161 if op not in opt2revset:
365 continue
162 continue
366 revop, andor = opt2revset[op]
163 revop, andor = opt2revset[op]
367 if '%(val)' not in revop:
164 if '%(val)' not in revop:
368 expr.append(revop)
165 expr.append(revop)
369 else:
166 else:
370 if not isinstance(val, list):
167 if not isinstance(val, list):
371 e = revop % {'val': val}
168 e = revop % {'val': val}
372 else:
169 else:
373 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
170 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
374 expr.append(e)
171 expr.append(e)
375
172
376 if expr:
173 if expr:
377 expr = '(' + ' and '.join(expr) + ')'
174 expr = '(' + ' and '.join(expr) + ')'
378 else:
175 else:
379 expr = None
176 expr = None
380 return expr, filematcher
177 return expr, filematcher
381
178
382 def getlogrevs(repo, pats, opts):
179 def getlogrevs(repo, pats, opts):
383 """Return (revs, expr, filematcher) where revs is an iterable of
180 """Return (revs, expr, filematcher) where revs is an iterable of
384 revision numbers, expr is a revset string built from log options
181 revision numbers, expr is a revset string built from log options
385 and file patterns or None, and used to filter 'revs'. If --stat or
182 and file patterns or None, and used to filter 'revs'. If --stat or
386 --patch are not passed filematcher is None. Otherwise it is a
183 --patch are not passed filematcher is None. Otherwise it is a
387 callable taking a revision number and returning a match objects
184 callable taking a revision number and returning a match objects
388 filtering the files to be detailed when displaying the revision.
185 filtering the files to be detailed when displaying the revision.
389 """
186 """
390 def increasingrevs(repo, revs, matcher):
187 def increasingrevs(repo, revs, matcher):
391 # The sorted input rev sequence is chopped in sub-sequences
188 # The sorted input rev sequence is chopped in sub-sequences
392 # which are sorted in ascending order and passed to the
189 # which are sorted in ascending order and passed to the
393 # matcher. The filtered revs are sorted again as they were in
190 # matcher. The filtered revs are sorted again as they were in
394 # the original sub-sequence. This achieve several things:
191 # the original sub-sequence. This achieve several things:
395 #
192 #
396 # - getlogrevs() now returns a generator which behaviour is
193 # - getlogrevs() now returns a generator which behaviour is
397 # adapted to log need. First results come fast, last ones
194 # adapted to log need. First results come fast, last ones
398 # are batched for performances.
195 # are batched for performances.
399 #
196 #
400 # - revset matchers often operate faster on revision in
197 # - revset matchers often operate faster on revision in
401 # changelog order, because most filters deal with the
198 # changelog order, because most filters deal with the
402 # changelog.
199 # changelog.
403 #
200 #
404 # - revset matchers can reorder revisions. "A or B" typically
201 # - revset matchers can reorder revisions. "A or B" typically
405 # returns returns the revision matching A then the revision
202 # returns returns the revision matching A then the revision
406 # matching B. We want to hide this internal implementation
203 # matching B. We want to hide this internal implementation
407 # detail from the caller, and sorting the filtered revision
204 # detail from the caller, and sorting the filtered revision
408 # again achieves this.
205 # again achieves this.
409 for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
206 for i, window in cmdutil.increasingwindows(0, len(revs), windowsize=1):
410 orevs = revs[i:i + window]
207 orevs = revs[i:i + window]
411 nrevs = set(matcher(repo, sorted(orevs)))
208 nrevs = set(matcher(repo, sorted(orevs)))
412 for rev in orevs:
209 for rev in orevs:
413 if rev in nrevs:
210 if rev in nrevs:
414 yield rev
211 yield rev
415
212
416 if not len(repo):
213 if not len(repo):
417 return iter([]), None, None
214 return iter([]), None, None
418 # Default --rev value depends on --follow but --follow behaviour
215 # Default --rev value depends on --follow but --follow behaviour
419 # depends on revisions resolved from --rev...
216 # depends on revisions resolved from --rev...
420 follow = opts.get('follow') or opts.get('follow_first')
217 follow = opts.get('follow') or opts.get('follow_first')
421 if opts.get('rev'):
218 if opts.get('rev'):
422 revs = scmutil.revrange(repo, opts['rev'])
219 revs = scmutil.revrange(repo, opts['rev'])
423 else:
220 else:
424 if follow and len(repo) > 0:
221 if follow and len(repo) > 0:
425 revs = scmutil.revrange(repo, ['.:0'])
222 revs = scmutil.revrange(repo, ['.:0'])
426 else:
223 else:
427 revs = range(len(repo) - 1, -1, -1)
224 revs = range(len(repo) - 1, -1, -1)
428 if not revs:
225 if not revs:
429 return iter([]), None, None
226 return iter([]), None, None
430 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
227 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
431 if expr:
228 if expr:
432 matcher = revset.match(repo.ui, expr)
229 matcher = revset.match(repo.ui, expr)
433 revs = increasingrevs(repo, revs, matcher)
230 revs = increasingrevs(repo, revs, matcher)
434 if not opts.get('hidden'):
231 if not opts.get('hidden'):
435 # --hidden is still experimental and not worth a dedicated revset
232 # --hidden is still experimental and not worth a dedicated revset
436 # yet. Fortunately, filtering revision number is fast.
233 # yet. Fortunately, filtering revision number is fast.
437 revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
234 revs = (r for r in revs if r not in repo.changelog.hiddenrevs)
438 else:
235 else:
439 revs = iter(revs)
236 revs = iter(revs)
440 return revs, expr, filematcher
237 return revs, expr, filematcher
441
238
442 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
239 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
443 filematcher=None):
240 filematcher=None):
444 seen, state = [], asciistate()
241 seen, state = [], graphmod.asciistate()
445 for rev, type, ctx, parents in dag:
242 for rev, type, ctx, parents in dag:
446 char = 'o'
243 char = 'o'
447 if ctx.node() in showparents:
244 if ctx.node() in showparents:
448 char = '@'
245 char = '@'
449 elif ctx.obsolete():
246 elif ctx.obsolete():
450 char = 'x'
247 char = 'x'
451 copies = None
248 copies = None
452 if getrenamed and ctx.rev():
249 if getrenamed and ctx.rev():
453 copies = []
250 copies = []
454 for fn in ctx.files():
251 for fn in ctx.files():
455 rename = getrenamed(fn, ctx.rev())
252 rename = getrenamed(fn, ctx.rev())
456 if rename:
253 if rename:
457 copies.append((fn, rename[0]))
254 copies.append((fn, rename[0]))
458 revmatchfn = None
255 revmatchfn = None
459 if filematcher is not None:
256 if filematcher is not None:
460 revmatchfn = filematcher(ctx.rev())
257 revmatchfn = filematcher(ctx.rev())
461 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
258 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
462 lines = displayer.hunk.pop(rev).split('\n')
259 lines = displayer.hunk.pop(rev).split('\n')
463 if not lines[-1]:
260 if not lines[-1]:
464 del lines[-1]
261 del lines[-1]
465 displayer.flush(rev)
262 displayer.flush(rev)
466 edges = edgefn(type, char, lines, seen, rev, parents)
263 edges = edgefn(type, char, lines, seen, rev, parents)
467 for type, char, lines, coldata in edges:
264 for type, char, lines, coldata in edges:
468 ascii(ui, state, type, char, lines, coldata)
265 graphmod.ascii(ui, state, type, char, lines, coldata)
469 displayer.close()
266 displayer.close()
470
267
471 @command('glog',
268 @command('glog',
472 [('f', 'follow', None,
269 [('f', 'follow', None,
473 _('follow changeset history, or file history across copies and renames')),
270 _('follow changeset history, or file history across copies and renames')),
474 ('', 'follow-first', None,
271 ('', 'follow-first', None,
475 _('only follow the first parent of merge changesets (DEPRECATED)')),
272 _('only follow the first parent of merge changesets (DEPRECATED)')),
476 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
273 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
477 ('C', 'copies', None, _('show copied files')),
274 ('C', 'copies', None, _('show copied files')),
478 ('k', 'keyword', [],
275 ('k', 'keyword', [],
479 _('do case-insensitive search for a given text'), _('TEXT')),
276 _('do case-insensitive search for a given text'), _('TEXT')),
480 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
277 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
481 ('', 'removed', None, _('include revisions where files were removed')),
278 ('', 'removed', None, _('include revisions where files were removed')),
482 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
279 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
483 ('u', 'user', [], _('revisions committed by user'), _('USER')),
280 ('u', 'user', [], _('revisions committed by user'), _('USER')),
484 ('', 'only-branch', [],
281 ('', 'only-branch', [],
485 _('show only changesets within the given named branch (DEPRECATED)'),
282 _('show only changesets within the given named branch (DEPRECATED)'),
486 _('BRANCH')),
283 _('BRANCH')),
487 ('b', 'branch', [],
284 ('b', 'branch', [],
488 _('show changesets within the given named branch'), _('BRANCH')),
285 _('show changesets within the given named branch'), _('BRANCH')),
489 ('P', 'prune', [],
286 ('P', 'prune', [],
490 _('do not display revision or any of its ancestors'), _('REV')),
287 _('do not display revision or any of its ancestors'), _('REV')),
491 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
288 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
492 ] + commands.logopts + commands.walkopts,
289 ] + commands.logopts + commands.walkopts,
493 _('[OPTION]... [FILE]'))
290 _('[OPTION]... [FILE]'))
494 def graphlog(ui, repo, *pats, **opts):
291 def graphlog(ui, repo, *pats, **opts):
495 """show revision history alongside an ASCII revision graph
292 """show revision history alongside an ASCII revision graph
496
293
497 Print a revision history alongside a revision graph drawn with
294 Print a revision history alongside a revision graph drawn with
498 ASCII characters.
295 ASCII characters.
499
296
500 Nodes printed as an @ character are parents of the working
297 Nodes printed as an @ character are parents of the working
501 directory.
298 directory.
502 """
299 """
503
300
504 revs, expr, filematcher = getlogrevs(repo, pats, opts)
301 revs, expr, filematcher = getlogrevs(repo, pats, opts)
505 revs = sorted(revs, reverse=1)
302 revs = sorted(revs, reverse=1)
506 limit = cmdutil.loglimit(opts)
303 limit = cmdutil.loglimit(opts)
507 if limit is not None:
304 if limit is not None:
508 revs = revs[:limit]
305 revs = revs[:limit]
509 revdag = graphmod.dagwalker(repo, revs)
306 revdag = graphmod.dagwalker(repo, revs)
510
307
511 getrenamed = None
308 getrenamed = None
512 if opts.get('copies'):
309 if opts.get('copies'):
513 endrev = None
310 endrev = None
514 if opts.get('rev'):
311 if opts.get('rev'):
515 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
312 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
516 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
313 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
517 displayer = show_changeset(ui, repo, opts, buffered=True)
314 displayer = show_changeset(ui, repo, opts, buffered=True)
518 showparents = [ctx.node() for ctx in repo[None].parents()]
315 showparents = [ctx.node() for ctx in repo[None].parents()]
519 generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
316 generate(ui, revdag, displayer, showparents, graphmod.asciiedges,
520 filematcher)
317 getrenamed, filematcher)
521
318
522 def graphrevs(repo, nodes, opts):
319 def graphrevs(repo, nodes, opts):
523 limit = cmdutil.loglimit(opts)
320 limit = cmdutil.loglimit(opts)
524 nodes.reverse()
321 nodes.reverse()
525 if limit is not None:
322 if limit is not None:
526 nodes = nodes[:limit]
323 nodes = nodes[:limit]
527 return graphmod.nodes(repo, nodes)
324 return graphmod.nodes(repo, nodes)
528
325
529 def goutgoing(ui, repo, dest=None, **opts):
326 def goutgoing(ui, repo, dest=None, **opts):
530 """show the outgoing changesets alongside an ASCII revision graph
327 """show the outgoing changesets alongside an ASCII revision graph
531
328
532 Print the outgoing changesets alongside a revision graph drawn with
329 Print the outgoing changesets alongside a revision graph drawn with
533 ASCII characters.
330 ASCII characters.
534
331
535 Nodes printed as an @ character are parents of the working
332 Nodes printed as an @ character are parents of the working
536 directory.
333 directory.
537 """
334 """
538
335
539 _checkunsupportedflags([], opts)
336 _checkunsupportedflags([], opts)
540 o = hg._outgoing(ui, repo, dest, opts)
337 o = hg._outgoing(ui, repo, dest, opts)
541 if o is None:
338 if o is None:
542 return
339 return
543
340
544 revdag = graphrevs(repo, o, opts)
341 revdag = graphrevs(repo, o, opts)
545 displayer = show_changeset(ui, repo, opts, buffered=True)
342 displayer = show_changeset(ui, repo, opts, buffered=True)
546 showparents = [ctx.node() for ctx in repo[None].parents()]
343 showparents = [ctx.node() for ctx in repo[None].parents()]
547 generate(ui, revdag, displayer, showparents, asciiedges)
344 generate(ui, revdag, displayer, showparents, graphmod.asciiedges)
548
345
549 def gincoming(ui, repo, source="default", **opts):
346 def gincoming(ui, repo, source="default", **opts):
550 """show the incoming changesets alongside an ASCII revision graph
347 """show the incoming changesets alongside an ASCII revision graph
551
348
552 Print the incoming changesets alongside a revision graph drawn with
349 Print the incoming changesets alongside a revision graph drawn with
553 ASCII characters.
350 ASCII characters.
554
351
555 Nodes printed as an @ character are parents of the working
352 Nodes printed as an @ character are parents of the working
556 directory.
353 directory.
557 """
354 """
558 def subreporecurse():
355 def subreporecurse():
559 return 1
356 return 1
560
357
561 _checkunsupportedflags([], opts)
358 _checkunsupportedflags([], opts)
562 def display(other, chlist, displayer):
359 def display(other, chlist, displayer):
563 revdag = graphrevs(other, chlist, opts)
360 revdag = graphrevs(other, chlist, opts)
564 showparents = [ctx.node() for ctx in repo[None].parents()]
361 showparents = [ctx.node() for ctx in repo[None].parents()]
565 generate(ui, revdag, displayer, showparents, asciiedges)
362 generate(ui, revdag, displayer, showparents, graphmod.asciiedges)
566
363
567 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
364 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
568
365
569 def uisetup(ui):
366 def uisetup(ui):
570 '''Initialize the extension.'''
367 '''Initialize the extension.'''
571 _wrapcmd('log', commands.table, graphlog)
368 _wrapcmd('log', commands.table, graphlog)
572 _wrapcmd('incoming', commands.table, gincoming)
369 _wrapcmd('incoming', commands.table, gincoming)
573 _wrapcmd('outgoing', commands.table, goutgoing)
370 _wrapcmd('outgoing', commands.table, goutgoing)
574
371
575 def _wrapcmd(cmd, table, wrapfn):
372 def _wrapcmd(cmd, table, wrapfn):
576 '''wrap the command'''
373 '''wrap the command'''
577 def graph(orig, *args, **kwargs):
374 def graph(orig, *args, **kwargs):
578 if kwargs['graph']:
375 if kwargs['graph']:
579 return wrapfn(*args, **kwargs)
376 return wrapfn(*args, **kwargs)
580 return orig(*args, **kwargs)
377 return orig(*args, **kwargs)
581 entry = extensions.wrapcommand(table, cmd, graph)
378 entry = extensions.wrapcommand(table, cmd, graph)
582 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
379 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
@@ -1,165 +1,368 b''
1 # Revision graph generator for Mercurial
1 # Revision graph generator for Mercurial
2 #
2 #
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """supports walking the history as DAGs suitable for graphical output
9 """supports walking the history as DAGs suitable for graphical output
10
10
11 The most basic format we use is that of::
11 The most basic format we use is that of::
12
12
13 (id, type, data, [parentids])
13 (id, type, data, [parentids])
14
14
15 The node and parent ids are arbitrary integers which identify a node in the
15 The node and parent ids are arbitrary integers which identify a node in the
16 context of the graph returned. Type is a constant specifying the node type.
16 context of the graph returned. Type is a constant specifying the node type.
17 Data depends on type.
17 Data depends on type.
18 """
18 """
19
19
20 from mercurial.node import nullrev
20 from mercurial.node import nullrev
21 import util
21 import util
22
22
23 CHANGESET = 'C'
23 CHANGESET = 'C'
24
24
def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks through revisions (which should be ordered
    from bigger to lower). It returns a tuple for each node. The node and
    parent ids are arbitrary integers which identify a node in the context
    of the graph returned.

    Parents that fall outside ``revs`` are replaced by their nearest
    ancestors inside ``revs`` (their "grandparents"), so edges stay
    connected even when the revision set is sparse.
    """
    if not revs:
        return

    cl = repo.changelog
    lowestrev = min(revs)
    # cache of grandparent() results, keyed by the missing parent rev;
    # grandparent() is expensive and the same missing parent can show up
    # for many nodes in the walk.
    gpcache = {}

    knownrevs = set(revs)
    for rev in revs:
        ctx = repo[rev]
        parents = sorted(set([p.rev() for p in ctx.parents()
                              if p.rev() in knownrevs]))
        # parents that exist but are not part of the displayed set
        mpars = [p.rev() for p in ctx.parents() if
                 p.rev() != nullrev and p.rev() not in parents]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                gp = gpcache[mpar] = grandparent(cl, lowestrev, revs, mpar)
            if not gp:
                # no displayed ancestor: keep the missing parent itself so
                # the edge is still drawn
                parents.append(mpar)
            else:
                parents.extend(g for g in gp if g not in parents)

        yield (ctx.rev(), CHANGESET, ctx, parents)
58
58
def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        # restrict edges to parents that are themselves being displayed
        parents = set([p.rev() for p in ctx.parents() if p.node() in include])
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
70
70
def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

      (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.

    Per-branch width/color overrides are read from the [graph] config
    section (keys of the form ``<branch>.width`` / ``<branch>.color``).
    """
    seen = []        # ordered list of active columns (revs with open edges)
    colors = {}      # color assigned to each pending rev
    newcolor = 1
    config = {}

    for key, val in repo.ui.configitems('graph'):
        if '.' in key:
            branch, setting = key.rsplit('.', 1)
            # Validation: only accept well-formed width/color settings
            if setting == "width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == "color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {}))
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next ('nextseen' avoids shadowing builtin next())
        if cur not in seen:
            seen.append(cur) # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        nextseen = seen[:]

        # Add parents to nextseen, replacing the current node's column
        addparents = [p for p in parents if p not in nextseen]
        nextseen[col:col + 1] = addparents

        # Set colors for the parents: first parent inherits, others get
        # a fresh color
        for i, p in enumerate(addparents):
            if not i:
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in nextseen:
                bconf = getconf(eid)
                edges.append((
                    ecol, nextseen.index(eid), colors[eid],
                    bconf.get('width', -1),
                    bconf.get('color', '')))
            elif eid == cur:
                for p in parents:
                    bconf = getconf(p)
                    edges.append((
                        ecol, nextseen.index(p), color,
                        bconf.get('width', -1),
                        bconf.get('color', '')))

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = nextseen
148
148
def grandparent(cl, lowestrev, roots, head):
    """Return all ancestors of head in roots which revision is
    greater or equal to lowestrev.

    Walks the changelog upwards from ``head``, stopping at revisions in
    ``roots`` (collected) or below ``lowestrev`` (pruned).
    """
    pending = set([head])
    seen = set()
    kept = set()
    # nullrev (-1) is the floor; never walk past it
    llowestrev = max(nullrev, lowestrev)
    while pending:
        r = pending.pop()
        if r >= llowestrev and r not in seen:
            if r in roots:
                kept.add(r)
            else:
                pending.update([p for p in cl.parentrevs(r)])
            seen.add(r)
    return sorted(kept)
166
def asciiedges(type, char, lines, seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()

    ``seen`` is the ordered list of active columns; it is mutated in
    place so the caller's column state carries over between calls.
    Yields one or more (type, char, lines, coldata) tuples, where
    coldata is (nodeidx, edges, ncols, nmorecols).
    """
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # split parents into those already occupying a column and new ones
    knownparents = []
    newparents = []
    for parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)

    ncols = len(seen)
    nextseen = seen[:]
    nextseen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p)) for p in knownparents]

    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
        char = '\\'
        lines = []
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    seen[:] = nextseen
    yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
209
210 def _fixlongrightedges(edges):
211 for (i, (start, end)) in enumerate(edges):
212 if end > start:
213 edges[i] = (start, end + 1)
214
215 def _getnodelineedgestail(
216 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
217 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
218 # Still going in the same non-vertical direction.
219 if n_columns_diff == -1:
220 start = max(node_index + 1, p_node_index)
221 tail = ["|", " "] * (start - node_index - 1)
222 tail.extend(["/", " "] * (n_columns - start))
223 return tail
224 else:
225 return ["\\", " "] * (n_columns - node_index - 1)
226 else:
227 return ["|", " "] * (n_columns - node_index - 1)
228
229 def _drawedges(edges, nodeline, interline):
230 for (start, end) in edges:
231 if start == end + 1:
232 interline[2 * end + 1] = "/"
233 elif start == end - 1:
234 interline[2 * start + 1] = "\\"
235 elif start == end:
236 interline[2 * start] = "|"
237 else:
238 if 2 * end >= len(nodeline):
239 continue
240 nodeline[2 * end] = "+"
241 if start > end:
242 (start, end) = (end, start)
243 for i in range(2 * start + 1, 2 * end):
244 if nodeline[i] != "+":
245 nodeline[i] = "-"
246
247 def _getpaddingline(ni, n_columns, edges):
248 line = []
249 line.extend(["|", " "] * ni)
250 if (ni, ni - 1) in edges or (ni, ni) in edges:
251 # (ni, ni - 1) (ni, ni)
252 # | | | | | | | |
253 # +---o | | o---+
254 # | | c | | c | |
255 # | |/ / | |/ /
256 # | | | | | |
257 c = "|"
258 else:
259 c = " "
260 line.extend([c, " "])
261 line.extend(["|", " "] * (n_columns - ni - 1))
262 return line
263
def asciistate():
    """returns the initial value for the "state" argument to ascii()

    The state is [previous column diff, previous node index]; a fresh
    list is returned on every call so callers never share state.
    """
    return [0, 0]
267
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data, usually 'C' for changesets.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        _fixlongrightedges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    nodeline.extend(
        _getnodelineedgestail(idx, state[1], ncols, coldiff,
                              state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    _drawedges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(_getpaddingline(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over: remember this entry's diff and column for the
    # next call's tail computation
    state[0] = coldiff
    state[1] = idx
General Comments 0
You need to be logged in to leave comments. Login now