# Revision graph generator for Mercurial
#
# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""supports walking the history as DAGs suitable for graphical output

The most basic format we use is that of::

  (id, type, data, [parentids])

The node and parent ids are arbitrary integers which identify a node in the
context of the graph returned. Type is a constant specifying the node type.
Data depends on type.
"""
Gregory Szorc
|
from __future__ import absolute_import

import heapq

from .node import nullrev
from . import (
    revset,
    util,
)
Peter Arrenbrecht
|
# Node type constant: each tuple emitted by the DAG generators below tags its
# payload with this type so consumers can distinguish changeset nodes.
CHANGESET = 'C'
Dirkjan Ochtman
|
r6691 | |||
Pierre-Yves David
|
def groupbranchiter(revs, parentsfunc, firstbranch=()):
    """Yield revisions from heads to roots one (topo) branch at a time.

    This function aims to be used by a graph generator that wishes to minimize
    the number of parallel branches and their interleaving.

    Example iteration order (numbers show the "true" order in a changelog):

      o  4
      |
      o  1
      |
      | o  3
      | |
      | o  2
      |/
      o  0

    Note that the ancestors of merges are understood by the current
    algorithm to be on the same branch. This means no reordering will
    occur behind a merge.

    :revs: a sorted-on-entry (this function re-sorts it descending) list of
           revision numbers to iterate over
    :parentsfunc: callable returning the parent revisions of a revision
    :firstbranch: optional set of revisions used to pre-unblock a branch so
                  it is emitted first
    """

    ### Quick summary of the algorithm
    #
    # This function is based around a "retention" principle. We keep revisions
    # in memory until we are ready to emit a whole branch that immediately
    # "merges" into an existing one. This reduces the number of parallel
    # branches with interleaved revisions.
    #
    # During iteration revs are split into two groups:
    # A) revision already emitted
    # B) revision in "retention". They are stored as different subgroups.
    #
    # for each REV, we do the following logic:
    #
    #   1) if REV is a parent of (A), we will emit it. If there is a
    #      retention group ((B) above) that is blocked on REV being
    #      available, we emit all the revisions out of that retention
    #      group first.
    #
    #   2) else, we search for a subgroup in (B) awaiting for REV to be
    #      available. If such a subgroup exists, we add REV to it and the
    #      subgroup is now awaiting REV.parents() to be available.
    #
    #   3) finally if no such group existed in (B), we create a new subgroup.
    #
    # To bootstrap the algorithm, we emit the tipmost revision (which
    # puts it in group (A) from above).

    revs.sort(reverse=True)

    # Set of parents of revisions that have been emitted. They can be
    # considered unblocked as the graph generator is already aware of them so
    # there is no need to delay the revisions that reference them.
    #
    # If someone wants to prioritize a branch over the others, pre-filling this
    # set will force all other branches to wait until this branch is ready to
    # be emitted.
    unblocked = set(firstbranch)

    # list of groups waiting to be displayed, each group is defined by:
    #
    #   (revs:    list of revs waiting to be displayed,
    #    blocked: set of revs that cannot be displayed before those in 'revs')
    #
    # The second value ('blocked') corresponds to parents of any revision in
    # the group ('revs') that is not itself contained in the group. The main
    # idea of this algorithm is to delay as much as possible the emission of
    # any revision: we wait until we are about to display these parents to
    # display the revs in a group.
    #
    # This first implementation is smart until it encounters a merge: it will
    # emit revs as soon as any parent is about to be emitted and can grow an
    # arbitrary number of revs in 'blocked'. In practice this means we properly
    # retain new branches but give up on any special ordering for ancestors of
    # merges. The implementation can be improved to handle this better.
    #
    # The first subgroup is special. It corresponds to all the revisions that
    # were already emitted. Its 'revs' list is expected to stay empty and its
    # 'blocked' set contains the parent revisions of already emitted revisions.
    groups = [([], unblocked)]
    pendingheap = []
    pendingset = set()

    heapq.heapify(pendingheap)
    heappop = heapq.heappop
    heappush = heapq.heappush
    for currentrev in revs:
        # Heap works with smallest element, we want highest so we invert
        if currentrev not in pendingset:
            heappush(pendingheap, -currentrev)
            pendingset.add(currentrev)
        # iterate on pending revs until the current rev has been processed
        rev = None
        while rev != currentrev:
            rev = -heappop(pendingheap)
            pendingset.remove(rev)

            # Seek for a subgroup blocked, waiting for the current revision.
            matching = [i for i, g in enumerate(groups) if rev in g[1]]

            if matching:
                # The main idea is to gather together all sets that are
                # blocked on the same revision:
                #
                #   revs [5, 4] waiting for 1
                #   revs [3, 2] waiting for 1
                #
                # These two groups will be merged when we process 1. We keep
                # the oldest subgroup first; graph layout could be improved by
                # putting the longest set first instead, but that is not done.
                targetidx = matching.pop(0)
                trevs, tparents = groups[targetidx]
                for i in matching:
                    gr = groups[i]
                    trevs.extend(gr[0])
                    tparents |= gr[1]
                # delete all merged subgroups (except the one we kept),
                # starting from the last subgroup so earlier indexes stay
                # valid while we delete
                for i in reversed(matching):
                    del groups[i]
            else:
                # This is a new head. We create a new subgroup for it.
                targetidx = len(groups)
                groups.append(([], set([rev])))

            gr = groups[targetidx]

            # We now add the current node to this subgroup. This is done
            # after the subgroup merging because all elements from a subgroup
            # that relied on this rev must precede it.
            #
            # We also update the 'blocked' set to include the parents of the
            # new node.
            if rev == currentrev: # only display stuff in rev
                gr[0].append(rev)
            gr[1].remove(rev)
            parents = [p for p in parentsfunc(rev) if p > nullrev]
            gr[1].update(parents)
            for p in parents:
                if p not in pendingset:
                    pendingset.add(p)
                    heappush(pendingheap, -p)

            # Look for a subgroup to display
            #
            # When unblocked is empty (if clause), we were not waiting for any
            # revision during the first iteration (if no priority was given)
            # or we emitted a whole disconnected set of the graph (reached a
            # root). In that case we arbitrarily take the oldest known
            # subgroup. The heuristic could probably be better.
            #
            # Otherwise (elif clause) if the subgroup is blocked on a revision
            # we just emitted, we can safely emit it as well.
            if not unblocked:
                if len(groups) > 1:  # display other subset
                    targetidx = 1
                    gr = groups[1]
            elif not gr[1] & unblocked:
                gr = None

            if gr is not None:
                # update the set of awaited revisions with the ones from the
                # subgroup
                unblocked |= gr[1]
                # output all revisions in the subgroup
                for r in gr[0]:
                    yield r
                # delete the subgroup we just output, unless it is groups[0]
                # in which case we just empty it
                if targetidx:
                    del groups[targetidx]
                else:
                    gr[0][:] = []

    # Flush any subgroup still waiting for revisions we are not going to
    # iterate over
    for g in groups:
        for r in g[0]:
            yield r
Pierre-Yves David
|
r23564 | |||
Alexander Solovyov
|
def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks through revisions (which should be ordered
    from bigger to lower). It returns a tuple for each node. The node and
    parent ids are arbitrary integers which identify a node in the context of
    the graph returned.
    """
    if not revs:
        return

    # cache of grandparent computations: mpar -> reachable roots
    gpcache = {}

    if repo.ui.configbool('experimental', 'graph-group-branches', False):
        firstbranch = ()
        firstbranchrevset = repo.ui.config(
            'experimental', 'graph-group-branches.firstbranch', '')
        if firstbranchrevset:
            firstbranch = repo.revs(firstbranchrevset)
        parentrevs = repo.changelog.parentrevs
        revs = list(groupbranchiter(revs, parentrevs, firstbranch))

    for rev in revs:
        ctx = repo[rev]
        # parents that are themselves in the displayed set
        parents = sorted(set([p.rev() for p in ctx.parents()
                              if p.rev() in revs]))
        # "missing" parents: real parents outside the displayed set whose
        # closest displayed ancestors must be found instead
        mpars = [p.rev() for p in ctx.parents() if
                 p.rev() != nullrev and p.rev() not in parents]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                gp = gpcache[mpar] = revset.reachableroots(repo, revs, [mpar])
            if not gp:
                # no displayed ancestor: keep the raw parent as a dangling edge
                parents.append(mpar)
            else:
                parents.extend(g for g in gp if g not in parents)

        yield (ctx.rev(), CHANGESET, ctx, parents)
Peter Arrenbrecht
|
r8836 | |||
Peter Arrenbrecht
|
def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        # restrict parent edges to nodes the caller asked for
        parents = set([p.rev() for p in ctx.parents() if p.node() in include])
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
Peter Arrenbrecht
|
r8837 | |||
Constantine Linnick
|
def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

      (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.
    """
    seen = []
    colors = {}
    newcolor = 1
    config = {}

    # Per-branch edge styling from the [graph] config section; entries look
    # like "<branch>.width = <int>" and "<branch>.color = <alnum>".
    for key, val in repo.ui.configitems('graph'):
        if '.' in key:
            branch, setting = key.rsplit('.', 1)
            # Validation
            if setting == "width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == "color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {}))
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next
        if cur not in seen:
            seen.append(cur) # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        next = seen[:]

        # Add parents to next
        addparents = [p for p in parents if p not in next]
        next[col:col + 1] = addparents

        # Set colors for the parents: the first parent continues the current
        # color, any further parent starts a fresh one
        for i, p in enumerate(addparents):
            if not i:
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in next:
                bconf = getconf(eid)
                edges.append((
                    ecol, next.index(eid), colors[eid],
                    bconf.get('width', -1),
                    bconf.get('color', '')))
            elif eid == cur:
                # current node disappears from the next row: route edges to
                # each of its parents instead
                for p in parents:
                    bconf = getconf(p)
                    edges.append((
                        ecol, next.index(p), color,
                        bconf.get('width', -1),
                        bconf.get('color', '')))

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = next
Alexander Solovyov
|
r14042 | |||
Patrick Mezard
|
def asciiedges(type, char, lines, seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()

    Mutates *seen* (the ordered list of active columns) in place and yields
    one or more (type, char, lines, coldata) tuples ready for ascii().
    """
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    knownparents = []
    newparents = []
    for parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)

    ncols = len(seen)
    nextseen = seen[:]
    nextseen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p)) for p in knownparents if p != nullrev]

    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
        char = '\\'
        lines = []
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    seen[:] = nextseen
    yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
def _fixlongrightedges(edges): | ||||
for (i, (start, end)) in enumerate(edges): | ||||
if end > start: | ||||
edges[i] = (start, end + 1) | ||||
def _getnodelineedgestail( | ||||
node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail): | ||||
if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0: | ||||
# Still going in the same non-vertical direction. | ||||
if n_columns_diff == -1: | ||||
start = max(node_index + 1, p_node_index) | ||||
tail = ["|", " "] * (start - node_index - 1) | ||||
tail.extend(["/", " "] * (n_columns - start)) | ||||
return tail | ||||
else: | ||||
return ["\\", " "] * (n_columns - node_index - 1) | ||||
else: | ||||
return ["|", " "] * (n_columns - node_index - 1) | ||||
def _drawedges(edges, nodeline, interline): | ||||
for (start, end) in edges: | ||||
if start == end + 1: | ||||
interline[2 * end + 1] = "/" | ||||
elif start == end - 1: | ||||
interline[2 * start + 1] = "\\" | ||||
elif start == end: | ||||
interline[2 * start] = "|" | ||||
else: | ||||
if 2 * end >= len(nodeline): | ||||
continue | ||||
nodeline[2 * end] = "+" | ||||
if start > end: | ||||
(start, end) = (end, start) | ||||
for i in range(2 * start + 1, 2 * end): | ||||
if nodeline[i] != "+": | ||||
nodeline[i] = "-" | ||||
def _getpaddingline(ni, n_columns, edges): | ||||
line = [] | ||||
line.extend(["|", " "] * ni) | ||||
if (ni, ni - 1) in edges or (ni, ni) in edges: | ||||
# (ni, ni - 1) (ni, ni) | ||||
# | | | | | | | | | ||||
# +---o | | o---+ | ||||
# | | c | | c | | | ||||
# | |/ / | |/ / | ||||
# | | | | | | | ||||
c = "|" | ||||
else: | ||||
c = " " | ||||
line.extend([c, " "]) | ||||
line.extend(["|", " "] * (n_columns - ni - 1)) | ||||
return line | ||||
def asciistate():
    """returns the initial value for the "state" argument to ascii()"""
    # [previous row's column diff, previous row's node column]
    return [0, 0]
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data, usually 'C' for changesets.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """
    idx, edges, ncols, coldiff = coldata
    # only single-column changes between consecutive rows are representable
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        _fixlongrightedges(edges)
    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])
    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line
    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])
    nodeline.extend(
        _getnodelineedgestail(idx, state[1], ncols, coldiff,
                              state[0], fix_nodeline_tail))
    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
    # draw edges from the current node to its parents
    _drawedges(edges, nodeline, shift_interline)
    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(_getpaddingline(idx, ncols, edges))
    lines.append(shift_interline)
    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        # pad with plain vertical continuation lines when the log text is
        # taller than the graph fragment
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)
    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')
    # ... and start over: remember this row's shape for the next call
    state[0] = coldiff
    state[1] = idx