##// END OF EJS Templates
graphmod: add a function for topological iteration...
Pierre-Yves David -
r23564:f7ce0837 default
parent child Browse files
Show More
@@ -1,367 +1,529
1 # Revision graph generator for Mercurial
1 # Revision graph generator for Mercurial
2 #
2 #
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """supports walking the history as DAGs suitable for graphical output
9 """supports walking the history as DAGs suitable for graphical output
10
10
11 The most basic format we use is that of::
11 The most basic format we use is that of::
12
12
13 (id, type, data, [parentids])
13 (id, type, data, [parentids])
14
14
15 The node and parent ids are arbitrary integers which identify a node in the
15 The node and parent ids are arbitrary integers which identify a node in the
16 context of the graph returned. Type is a constant specifying the node type.
16 context of the graph returned. Type is a constant specifying the node type.
17 Data depends on type.
17 Data depends on type.
18 """
18 """
19
19
20 from mercurial.node import nullrev
20 from mercurial.node import nullrev
21 import util
21 import util
22
22
23 CHANGESET = 'C'
23 CHANGESET = 'C'
24
24
def groupbranchiter(revs, parentsfunc):
    """yield revision from heads to roots one (topo) branch after the other.

    This function aims to be used by a graph generator that wishes to minimize
    the amount of parallel branches and their interleaving.

    Example iteration order:

      o  4
      |
      o  1
      |
      | o  3
      | |
      | o  2
      |/
      o  0

    Currently does not handle non-contiguous <revs> input.

    Currently considers every changeset under a merge to be on the same
    branch, using revision number to sort them.

    Could easily be extended to give priority to an initial branch."""
    ### Quick summary of the algorithm
    #
    # This function is based around a "retention" principle. We keep revisions
    # in memory until we are ready to emit a whole branch that immediately
    # "merges" into an existing one. This reduces the number of branches
    # "ongoing" at the same time.
    #
    # During iteration revs are split into two groups:
    # A) revisions already emitted
    # B) revisions in "retention". They are stored as distinct subgroups.
    #
    # For each REV, we apply the following logic:
    #
    #   a) if REV is a parent of (A), we will emit it. But before emitting it,
    #      we first "free" all the revs from subgroups in (B) that were
    #      waiting for REV to be available. So we emit all revisions of such
    #      a subgroup before emitting REV itself.
    #
    #   b) else, we search for a subgroup in (B) awaiting REV. If such a
    #      subgroup exists, we add REV to it and the subgroup is now awaiting
    #      REV.parents() instead.
    #
    #   c) finally, if no such group exists in (B), we create a new subgroup.
    #
    # To bootstrap the algorithm, we emit the tipmost revision.

    # NOTE(review): sorts the caller's list in place — callers must not rely
    # on the original order of <revs> afterwards.
    revs.sort(reverse=True)

    # Set of parents of revisions that have been yielded. They can be
    # considered unblocked as the graph generator is already aware of them,
    # so there is no need to delay the revisions that reference them.
    unblocked = set()

    # list of groups waiting to be displayed; each group is defined by:
    #
    #   (revs:    list of revs waiting to be displayed,
    #    blocked: set of revs that cannot be displayed before those in 'revs')
    #
    # The second value ('blocked') corresponds to parents of any revision in
    # the group ('revs') that is not itself contained in the group. The main
    # idea of this algorithm is to delay as much as possible the emission of
    # any revision. This means waiting for the moment we are about to display
    # these parents to display the revs in a group.
    #
    # This first implementation is smart until it meets a merge: it will emit
    # revs as soon as any parent is about to be emitted and can grow an
    # arbitrary number of revs in 'blocked'. In practice this means we
    # properly retain new branches but give up on any special ordering for
    # ancestors of merges. The implementation can be improved to handle this
    # better.
    #
    # The first subgroup is special. It corresponds to all the revisions that
    # were already emitted. Its 'revs' list is expected to stay empty and its
    # 'blocked' set is the very same object as 'unblocked' above: it contains
    # the parent revisions of already emitted revisions.
    #
    # You could pre-seed the <parents> set of groups[0] with specific
    # changesets to select which branch is emitted first.
    #
    # We do not support revisions with holes yet, but adding such support
    # would be easy. The iteration would have to be done using both input
    # revisions and parents (see cl.ancestors function + a few tweaks), but
    # only revisions that are part of the initial set should be emitted.
    groups = [([], unblocked)]
    for current in revs:
        # Look for subgroups blocked, waiting for the current revision.
        matching = [i for i, g in enumerate(groups) if current in g[1]]

        if matching:
            # The main idea is to gather together all subgroups that await
            # the same revision.
            #
            # This merging is done at the time we are about to add this
            # common awaited revision to the subgroup, for simplicity's sake.
            # Such a merge could happen sooner, when we update the "blocked"
            # set of revisions.
            #
            # We also always keep the oldest subgroup first. We could
            # probably improve the behavior by having the longest set first;
            # that way, graph algorithms could minimise the length of the
            # parallel lines they draw. This is currently not done.
            targetidx = matching.pop(0)
            trevs, tparents = groups[targetidx]
            for i in matching:
                gr = groups[i]
                trevs.extend(gr[0])
                tparents |= gr[1]
            # delete all merged subgroups (but the one we keep)
            # (starting from the last subgroup for performance and sanity
            # reasons)
            for i in reversed(matching):
                del groups[i]
        else:
            # This is a new head. We create a new subgroup for it.
            targetidx = len(groups)
            groups.append(([], set([current])))

        gr = groups[targetidx]

        # We now add the current node to this subgroup. This is done after
        # the subgroup merging because all elements from a subgroup that
        # relied on this rev must precede it.
        #
        # We also update the <parents> set to include the parents of the
        # new node.
        gr[0].append(current)
        gr[1].remove(current)
        gr[1].update([p for p in parentsfunc(current) if p > nullrev])

        # Look for a subgroup to display
        #
        # When unblocked is empty (if clause), we are not waiting on any
        # revision: either this is the first iteration (and no priority was
        # given) or we have output a whole disconnected set of the graph
        # (reached a root). In that case we arbitrarily take the oldest known
        # subgroup. The heuristic could probably be better.
        #
        # Otherwise (elif clause) we have some emitted revisions. If the
        # subgroup awaits one of the revisions already output, we can safely
        # output it.
        if not unblocked:
            if len(groups) > 1:  # display other subset
                targetidx = 1
                gr = groups[1]
        elif not gr[1] & unblocked:
            gr = None

        if gr is not None:
            # update the set of awaited revisions with the ones from the
            # subgroup
            unblocked |= gr[1]
            # output all revisions in the subgroup
            for r in gr[0]:
                yield r
            # delete the subgroup that was just output,
            # unless it is groups[0], in which case just empty it.
            if targetidx:
                del groups[targetidx]
            else:
                gr[0][:] = []
186
def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks through revisions (which should be ordered
    from bigger to lower). It returns a tuple for each node.  The node and
    parent ids are arbitrary integers which identify a node in the context of
    the graph returned.
    """
    if not revs:
        return

    changelog = repo.changelog
    minrev = revs.min()
    # memoize grandparent() per missing parent: several revisions may share
    # the same out-of-set parent
    gpcache = {}

    for rev in revs:
        ctx = repo[rev]
        parentrevs = [p.rev() for p in ctx.parents()]
        # direct parents that are themselves part of the displayed set
        parents = sorted(set(p for p in parentrevs if p in revs))
        # real parents falling outside the set: replace them with the
        # closest displayed ancestors ("grandparents")
        missing = [p for p in parentrevs
                   if p != nullrev and p not in parents]
        for mpar in missing:
            try:
                gp = gpcache[mpar]
            except KeyError:
                gp = gpcache[mpar] = grandparent(changelog, minrev, revs,
                                                 mpar)
            if not gp:
                # no displayed ancestor: keep the raw parent id
                parents.append(mpar)
            else:
                for g in gp:
                    if g not in parents:
                        parents.append(g)

        yield (ctx.rev(), CHANGESET, ctx, parents)
57
219
def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    # membership test must be O(1); <nodes> itself may be a plain list
    wanted = set(nodes)
    for n in nodes:
        ctx = repo[n]
        parentrevs = sorted(set(p.rev() for p in ctx.parents()
                                if p.node() in wanted))
        yield (ctx.rev(), CHANGESET, ctx, parentrevs)
69
231
def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

      (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.
    """
    # ordered list of node ids currently occupying a column (ongoing edges)
    seen = []
    # color assigned to each pending (not yet drawn) node id
    colors = {}
    newcolor = 1
    # per-branch display overrides parsed from the [graph] config section,
    # keyed by branch name: {'width': int, 'color': str}
    config = {}

    for key, val in repo.ui.configitems('graph'):
        if '.' in key:
            branch, setting = key.rsplit('.', 1)
            # Validation: only accept well-formed values, silently ignore
            # the rest.
            if setting == "width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == "color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        # branch lookup requires a repo access per rev; cache it
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {}))
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next
        if cur not in seen:
            seen.append(cur) # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        next = seen[:]

        # Add parents to next: they take over the current node's column(s)
        addparents = [p for p in parents if p not in next]
        next[col:col + 1] = addparents

        # Set colors for the parents
        for i, p in enumerate(addparents):
            if not i:
                # first parent continues the current node's line and color
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in next:
                # ongoing edge passing through this row
                bconf = getconf(eid)
                edges.append((
                    ecol, next.index(eid), colors[eid],
                    bconf.get('width', -1),
                    bconf.get('color', '')))
            elif eid == cur:
                # edges from the current node down to each of its parents
                for p in parents:
                    bconf = getconf(p)
                    edges.append((
                        ecol, next.index(p), color,
                        bconf.get('width', -1),
                        bconf.get('color', '')))

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = next
147
309
def grandparent(cl, lowestrev, roots, head):
    """Return all ancestors of head in roots which revision is
    greater or equal to lowestrev.

    Walks the changelog <cl> from <head> towards the roots, stopping at
    revisions below <lowestrev>, and collects every visited revision that
    is a member of <roots>.  Returns them as a sorted list.
    """
    pending = set([head])
    seen = set()
    kept = set()
    # never walk below the null revision
    llowestrev = max(nullrev, lowestrev)
    while pending:
        r = pending.pop()
        if r >= llowestrev and r not in seen:
            if r in roots:
                kept.add(r)
            else:
                # not in roots: keep climbing through this rev's parents
                # (set.update consumes the parentrevs tuple directly; no
                # intermediate list needed)
                pending.update(cl.parentrevs(r))
            seen.add(r)
    return sorted(kept)
165
327
def asciiedges(type, char, lines, seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()

    Yields one or more (type, char, lines, coldata) tuples: zero or more
    intermediate "expansion" rows when the node has more than two new
    parents, then the row for the node itself.  Mutates <seen> in place so
    the caller's column state carries over to the next call.
    """
    # allocate a column for this node if it does not continue an already
    # tracked edge
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # split parents between those already occupying a column (known) and
    # those needing a new column (new)
    knownparents = []
    newparents = []
    for parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)

    ncols = len(seen)
    # next row's column layout: this node's column is replaced by its new
    # parents
    nextseen = seen[:]
    nextseen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p)) for p in knownparents if p != nullrev]

    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
        # the node symbol and its text were drawn on the first row;
        # continuation rows use a '\' connector and no text
        char = '\\'
        lines = []
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    # final row: at most two new parents remain
    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    # propagate the new column layout to the caller for the next node
    seen[:] = nextseen
    yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
208
370
209 def _fixlongrightedges(edges):
371 def _fixlongrightedges(edges):
210 for (i, (start, end)) in enumerate(edges):
372 for (i, (start, end)) in enumerate(edges):
211 if end > start:
373 if end > start:
212 edges[i] = (start, end + 1)
374 edges[i] = (start, end + 1)
213
375
214 def _getnodelineedgestail(
376 def _getnodelineedgestail(
215 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
377 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
216 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
378 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
217 # Still going in the same non-vertical direction.
379 # Still going in the same non-vertical direction.
218 if n_columns_diff == -1:
380 if n_columns_diff == -1:
219 start = max(node_index + 1, p_node_index)
381 start = max(node_index + 1, p_node_index)
220 tail = ["|", " "] * (start - node_index - 1)
382 tail = ["|", " "] * (start - node_index - 1)
221 tail.extend(["/", " "] * (n_columns - start))
383 tail.extend(["/", " "] * (n_columns - start))
222 return tail
384 return tail
223 else:
385 else:
224 return ["\\", " "] * (n_columns - node_index - 1)
386 return ["\\", " "] * (n_columns - node_index - 1)
225 else:
387 else:
226 return ["|", " "] * (n_columns - node_index - 1)
388 return ["|", " "] * (n_columns - node_index - 1)
227
389
228 def _drawedges(edges, nodeline, interline):
390 def _drawedges(edges, nodeline, interline):
229 for (start, end) in edges:
391 for (start, end) in edges:
230 if start == end + 1:
392 if start == end + 1:
231 interline[2 * end + 1] = "/"
393 interline[2 * end + 1] = "/"
232 elif start == end - 1:
394 elif start == end - 1:
233 interline[2 * start + 1] = "\\"
395 interline[2 * start + 1] = "\\"
234 elif start == end:
396 elif start == end:
235 interline[2 * start] = "|"
397 interline[2 * start] = "|"
236 else:
398 else:
237 if 2 * end >= len(nodeline):
399 if 2 * end >= len(nodeline):
238 continue
400 continue
239 nodeline[2 * end] = "+"
401 nodeline[2 * end] = "+"
240 if start > end:
402 if start > end:
241 (start, end) = (end, start)
403 (start, end) = (end, start)
242 for i in range(2 * start + 1, 2 * end):
404 for i in range(2 * start + 1, 2 * end):
243 if nodeline[i] != "+":
405 if nodeline[i] != "+":
244 nodeline[i] = "-"
406 nodeline[i] = "-"
245
407
246 def _getpaddingline(ni, n_columns, edges):
408 def _getpaddingline(ni, n_columns, edges):
247 line = []
409 line = []
248 line.extend(["|", " "] * ni)
410 line.extend(["|", " "] * ni)
249 if (ni, ni - 1) in edges or (ni, ni) in edges:
411 if (ni, ni - 1) in edges or (ni, ni) in edges:
250 # (ni, ni - 1) (ni, ni)
412 # (ni, ni - 1) (ni, ni)
251 # | | | | | | | |
413 # | | | | | | | |
252 # +---o | | o---+
414 # +---o | | o---+
253 # | | c | | c | |
415 # | | c | | c | |
254 # | |/ / | |/ /
416 # | |/ / | |/ /
255 # | | | | | |
417 # | | | | | |
256 c = "|"
418 c = "|"
257 else:
419 else:
258 c = " "
420 c = " "
259 line.extend([c, " "])
421 line.extend([c, " "])
260 line.extend(["|", " "] * (n_columns - ni - 1))
422 line.extend(["|", " "] * (n_columns - ni - 1))
261 return line
423 return line
262
424
def asciistate():
    """Return the initial value for the "state" argument to ascii().

    state[0] holds the column-count delta of the previously printed node,
    state[1] its column index.  A fresh list is returned on every call so
    independent graph walks never share state.
    """
    return [0] * 2
266
428
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data, usually 'C' for changesets.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    # ascii() can only render a change of at most one column per call;
    # asciiedges() guarantees this by emitting intermediate expansion rows
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        _fixlongrightedges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    nodeline.extend(
        _getnodelineedgestail(idx, state[1], ncols, coldiff,
                              state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    _drawedges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(_getpaddingline(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        # NOTE: the same extra_interline list object is appended repeatedly;
        # this is safe because the lines are only read when printed below.
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        # pad the graph part to a fixed width so the log text lines up
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over: remember this row's shape for the next call
    state[0] = coldiff
    state[1] = idx
General Comments 0
You need to be logged in to leave comments. Login now