# Revision graph generator for Mercurial
#
# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""supports walking the history as DAGs suitable for graphical output

The most basic format we use is that of::

  (id, type, data, [parentids])

The node and parent ids are arbitrary integers which identify a node in the
context of the graph returned. Type is a constant specifying the node type.
Data depends on type.
"""

from __future__ import absolute_import

import heapq

from .node import nullrev
from . import (
    revset,
    util,
)

CHANGESET = 'C'
PARENT = 'P'
GRANDPARENT = 'G'
MISSINGPARENT = 'M'

# Style of line to draw. None signals a line that ends and is removed at this
# point.
EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None}
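# asciiedges() below copies these styles into the per-walk state (see
# asciistate()); ascii() then draws PARENT edges as '|', GRANDPARENT edges as
# ':' and terminates MISSINGPARENT edges with a short '~' line via
# _drawendinglines().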

def groupbranchiter(revs, parentsfunc, firstbranch=()):
    """Yield revisions from heads to roots one (topo) branch at a time.

    This function aims to be used by a graph generator that wishes to minimize
    the number of parallel branches and their interleaving.

    Example iteration order (numbers show the "true" order in a changelog):

      o  4
      |
      o  1
      |
      | o  3
      | |
      | o  2
      |/
      o  0

    Note that the ancestors of merges are understood by the current
    algorithm to be on the same branch. This means no reordering will
    occur behind a merge.
    """

    ### Quick summary of the algorithm
    #
    # This function is based around a "retention" principle. We keep revisions
    # in memory until we are ready to emit a whole branch that immediately
    # "merges" into an existing one. This reduces the number of parallel
    # branches with interleaved revisions.
    #
    # During iteration revs are split into two groups:
    # A) revisions already emitted
    # B) revisions in "retention". They are stored as different subgroups.
    #
    # For each REV, we apply the following logic:
    #
    #   1) if REV is a parent of (A), we will emit it. If there is a
    #      retention group ((B) above) that is blocked on REV being
    #      available, we emit all the revisions out of that retention
    #      group first.
    #
    #   2) else, we search for a subgroup in (B) awaiting REV to become
    #      available. If such a subgroup exists, we add REV to it and the
    #      subgroup is now awaiting REV.parents() to become available.
    #
    #   3) finally, if no such group existed in (B), we create a new subgroup.
    #
    #
    # To bootstrap the algorithm, we emit the tipmost revision (which
    # puts it in group (A) from above).

    revs.sort(reverse=True)
    # Set of parents of revisions that have been emitted. They can be
    # considered unblocked as the graph generator is already aware of them so
    # there is no need to delay the revisions that reference them.
    #
    # If someone wants to prioritize a branch over the others, pre-filling this
    # set will force all other branches to wait until this branch is ready to
    # be emitted.
    unblocked = set(firstbranch)

    # list of groups waiting to be displayed, each group is defined by:
    #
    #   (revs:    list of revs waiting to be displayed,
    #    blocked: set of revs that cannot be displayed before those in 'revs')
    #
    # The second value ('blocked') corresponds to parents of any revision in
    # the group ('revs') that is not itself contained in the group. The main
    # idea of this algorithm is to delay as much as possible the emission of
    # any revision. This means waiting for the moment we are about to display
    # these parents to display the revs in a group.
    #
    # This first implementation is smart until it encounters a merge: it will
    # emit revs as soon as any parent is about to be emitted and can grow an
    # arbitrary number of revs in 'blocked'. In practice this means we properly
    # retain new branches but give up on any special ordering for ancestors
    # of merges. The implementation can be improved to handle this better.
    #
    # The first subgroup is special. It corresponds to all the revisions that
    # were already emitted. Its 'revs' list is expected to be empty and its
    # 'blocked' set contains the parent revisions of already emitted revisions.
    #
    # You could pre-seed the <parents> set of groups[0] to a specific
    # changeset to select what the first emitted branch should be.
    groups = [([], unblocked)]
    pendingheap = []
    pendingset = set()
    heapq.heapify(pendingheap)
    heappop = heapq.heappop
    heappush = heapq.heappush
    for currentrev in revs:
        # Heap works with smallest element, we want highest so we invert
        if currentrev not in pendingset:
            heappush(pendingheap, -currentrev)
            pendingset.add(currentrev)
        # Iterate on pending revs until after the current rev has been
        # processed.
        rev = None
        while rev != currentrev:
            rev = -heappop(pendingheap)
            pendingset.remove(rev)
            # Seek for a subgroup blocked, waiting for the current revision.
            matching = [i for i, g in enumerate(groups) if rev in g[1]]

            if matching:
                # The main idea is to gather together all sets that are blocked
                # on the same revision.
                #
                # Groups are merged when a common blocking ancestor is
                # observed. For example, given two groups:
                #
                #   revs [5, 4] waiting for 1
                #   revs [3, 2] waiting for 1
                #
                # These two groups will be merged when we process
                # 1. In theory, we could have merged the groups when
                # we added 2 to the group it is now in (we could have
                # noticed the groups were both blocked on 1 then), but
                # the way it works now makes the algorithm simpler.
                #
                # We also always keep the oldest subgroup first. We can
                # probably improve the behavior by having the longest set
                # first. That way, graph algorithms could minimise the length
                # of the parallel lines in their drawing. This is currently
                # not done.
                targetidx = matching.pop(0)
                trevs, tparents = groups[targetidx]
                for i in matching:
                    gr = groups[i]
                    trevs.extend(gr[0])
                    tparents |= gr[1]
                # delete all merged subgroups (except the one we kept)
                # (starting from the last subgroup for performance and
                # sanity reasons)
                for i in reversed(matching):
                    del groups[i]
            else:
                # This is a new head. We create a new subgroup for it.
                targetidx = len(groups)
                groups.append(([], set([rev])))

            gr = groups[targetidx]

            # We now add the current node to this subgroup. This is done
            # after the subgroup merging because all elements from a subgroup
            # that relied on this rev must precede it.
            #
            # We also update the <parents> set to include the parents of the
            # new nodes.
            if rev == currentrev: # only display stuff in rev
                gr[0].append(rev)
            gr[1].remove(rev)
            parents = [p for p in parentsfunc(rev) if p > nullrev]
            gr[1].update(parents)
            for p in parents:
                if p not in pendingset:
                    pendingset.add(p)
                    heappush(pendingheap, -p)

            # Look for a subgroup to display
            #
            # When unblocked is empty (if clause), we were not waiting for any
            # revisions during the first iteration (if no priority was given)
            # or we emitted a whole disconnected set of the graph (reached a
            # root). In that case we arbitrarily take the oldest known
            # subgroup. The heuristic could probably be better.
            #
            # Otherwise (elif clause), if the subgroup is blocked on
            # a revision we just emitted, we can safely emit it as
            # well.
            if not unblocked:
                if len(groups) > 1:  # display other subset
                    targetidx = 1
                    gr = groups[1]
            elif not gr[1] & unblocked:
                gr = None

            if gr is not None:
                # update the set of awaited revisions with the one from the
                # subgroup
                unblocked |= gr[1]
                # output all revisions in the subgroup
                for r in gr[0]:
                    yield r
                # delete the subgroup that you just output
                # unless it is groups[0] in which case you just empty it.
                if targetidx:
                    del groups[targetidx]
                else:
                    gr[0][:] = []

    # Check if we have some subgroup waiting for revisions we are not going to
    # iterate over
    for g in groups:
        for r in g[0]:
            yield r
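
# A minimal, illustrative sketch of the ordering above (the parent mapping is
# hand-built and stands in for changelog.parentrevs; it matches the docstring
# example graph):
#
#   >>> parents = {0: [], 1: [0], 2: [0], 3: [2], 4: [1]}
#   >>> list(groupbranchiter([0, 1, 2, 3, 4], lambda r: parents[r]))
#   [4, 1, 3, 2, 0]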

def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples

    This generator function walks through revisions (which should be ordered
    from higher to lower revision number). It returns a tuple for each node.

    Each parentinfo entry is a tuple with (edgetype, parentid), where edgetype
    is one of PARENT, GRANDPARENT or MISSINGPARENT. The node and parent ids
    are arbitrary integers which identify a node in the context of the graph
    returned.
    """
    if not revs:
        return

    gpcache = {}

    if repo.ui.configbool('experimental', 'graph-group-branches', False):
        firstbranch = ()
        firstbranchrevset = repo.ui.config(
            'experimental', 'graph-group-branches.firstbranch', '')
        if firstbranchrevset:
            firstbranch = repo.revs(firstbranchrevset)
        parentrevs = repo.changelog.parentrevs
        revs = groupbranchiter(revs, parentrevs, firstbranch)
        revs = revset.baseset(revs)

    for rev in revs:
        ctx = repo[rev]
        # partition into parents in the rev set and missing parents, then
        # augment the lists with markers, to inform graph drawing code about
        # what kind of edge to draw between nodes.
        pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
        mpars = [p.rev() for p in ctx.parents()
                 if p.rev() != nullrev and p.rev() not in pset]
        parents = [(PARENT, p) for p in sorted(pset)]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                # precompute slow query as we know reachableroots() goes
                # through all revs (issue4782)
                if not isinstance(revs, revset.baseset):
                    revs = revset.baseset(revs)
                gp = gpcache[mpar] = sorted(set(revset.reachableroots(
                    repo, revs, [mpar])))
            if not gp:
                parents.append((MISSINGPARENT, mpar))
                pset.add(mpar)
            else:
                parents.extend((GRANDPARENT, g) for g in gp if g not in pset)
                pset.update(gp)

        yield (ctx.rev(), CHANGESET, ctx, parents)

def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        parents = set((PARENT, p.rev()) for p in ctx.parents()
                      if p.node() in include)
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))

def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

      (id, type, data, (col, color), [(col, nextcol, color, width, bcolor)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents; width and bcolor are the branch-specific width and color
        settings from the [graph] config section (-1 and '' when unset).
    """
    seen = []
    colors = {}
    newcolor = 1
    config = {}
    for key, val in repo.ui.configitems('graph'):
        if '.' in key:
            branch, setting = key.rsplit('.', 1)
            # Validation
            if setting == "width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == "color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {}))
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next
        if cur not in seen:
            seen.append(cur) # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        next = seen[:]

        # Add parents to next
        addparents = [p for pt, p in parents if p not in next]
        next[col:col + 1] = addparents

        # Set colors for the parents
        for i, p in enumerate(addparents):
            if not i:
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in next:
                bconf = getconf(eid)
                edges.append((
                    ecol, next.index(eid), colors[eid],
                    bconf.get('width', -1),
                    bconf.get('color', '')))
            elif eid == cur:
                for ptype, p in parents:
                    bconf = getconf(p)
                    edges.append((
                        ecol, next.index(p), color,
                        bconf.get('width', -1),
                        bconf.get('color', '')))

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = next

def asciiedges(type, char, lines, state, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()"""
    seen = state['seen']
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)
    knownparents = []
    newparents = []
    for ptype, parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)
            state['edges'][parent] = state['styles'].get(ptype, '|')

    ncols = len(seen)
    nextseen = seen[:]
    nextseen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p))
             for p in knownparents if p != nullrev]

    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
        char = '\\'
        lines = []
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    seen[:] = nextseen
    # remove current node from edge characters, no longer needed
    state['edges'].pop(rev, None)
    yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))

def _fixlongrightedges(edges):
    for (i, (start, end)) in enumerate(edges):
        if end > start:
            edges[i] = (start, end + 1)

def _getnodelineedgestail(
        echars, idx, pidx, ncols, coldiff, pdiff, fix_tail):
    if fix_tail and coldiff == pdiff and coldiff != 0:
        # Still going in the same non-vertical direction.
        if coldiff == -1:
            start = max(idx + 1, pidx)
            tail = echars[idx * 2:(start - 1) * 2]
            tail.extend(["/", " "] * (ncols - start))
            return tail
        else:
            return ["\\", " "] * (ncols - idx - 1)
    else:
        remainder = (ncols - idx - 1)
        return echars[-(remainder * 2):] if remainder > 0 else []

def _drawedges(echars, edges, nodeline, interline):
    for (start, end) in edges:
        if start == end + 1:
            interline[2 * end + 1] = "/"
        elif start == end - 1:
            interline[2 * start + 1] = "\\"
        elif start == end:
            interline[2 * start] = echars[2 * start]
        else:
            if 2 * end >= len(nodeline):
                continue
            nodeline[2 * end] = "+"
            if start > end:
                (start, end) = (end, start)
            for i in range(2 * start + 1, 2 * end):
                if nodeline[i] != "+":
                    nodeline[i] = "-"

def _getpaddingline(echars, idx, ncols, edges):
    # all edges up to the current node
    line = echars[:idx * 2]
    # an edge for the current node, if there is one
    if (idx, idx - 1) in edges or (idx, idx) in edges:
        # (idx, idx - 1)      (idx, idx)
        # | | | |           | | | |
        # +---o |           | o---+
        # | | X |           | X | |
        # | |/ /            | |/ /
        # | | |             | | |
        line.extend(echars[idx * 2:(idx + 1) * 2])
    else:
        line.extend('  ')
    # all edges to the right of the current node
    remainder = ncols - idx - 1
    if remainder > 0:
        line.extend(echars[-(remainder * 2):])
    return line

def _drawendinglines(lines, extra, edgemap, seen):
    """Draw ending lines for missing parent edges

    None indicates an edge that ends between this node and the next.
    Replace it with a short line ending in ~ and add / lines to any edges to
    the right.
    """
    if None not in edgemap.values():
        return

    # Check for more edges to the right of our ending edges.
    # We need enough space to draw adjustment lines for these.
    edgechars = extra[::2]
    while edgechars and edgechars[-1] is None:
        edgechars.pop()
    shift_size = max((edgechars.count(None) * 2) - 1, 0)
    while len(lines) < 3 + shift_size:
        lines.append(extra[:])

    if shift_size:
        empties = []
        toshift = []
        first_empty = extra.index(None)
        for i, c in enumerate(extra[first_empty::2], first_empty // 2):
            if c is None:
                empties.append(i * 2)
            else:
                toshift.append(i * 2)
        targets = list(range(first_empty, first_empty + len(toshift) * 2, 2))
        positions = toshift[:]
        for line in lines[-shift_size:]:
            line[first_empty:] = [' '] * (len(line) - first_empty)
            for i in range(len(positions)):
                pos = positions[i] - 1
                positions[i] = max(pos, targets[i])
                line[pos] = '/' if pos > targets[i] else extra[toshift[i]]

    map = {1: '|', 2: '~'}
    for i, line in enumerate(lines):
        if None not in line:
            continue
        line[:] = [c or map.get(i, ' ') for c in line]

    # remove edges that ended
    remove = [p for p, c in edgemap.items() if c is None]
    for parent in remove:
        del edgemap[parent]
        seen.remove(parent)

def asciistate():
    """returns the initial value for the "state" argument to ascii()"""
    return {
        'seen': [],
        'edges': {},
        'lastcoldiff': 0,
        'lastindex': 0,
        'styles': EDGES.copy(),
    }

def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data, usually 'C' for changesets.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """
    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2

    edgemap, seen = state['edges'], state['seen']
    # Be tolerant of history issues; make sure we have at least ncols + coldiff
    # elements to work with. See test-glog.t for broken history test cases.
    echars = [c for p in seen for c in (edgemap.get(p, '|'), ' ')]
    echars.extend(('|', ' ') * max(ncols + coldiff - len(seen), 0))

    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        _fixlongrightedges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = echars[:idx * 2]
    nodeline.extend([char, " "])
    nodeline.extend(
        _getnodelineedgestail(
            echars, idx, state['lastindex'], ncols, coldiff,
            state['lastcoldiff'], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = echars[:idx * 2]
    shift_interline.extend(' ' * (2 + coldiff))
    count = ncols - idx - 1
    if coldiff == -1:
        shift_interline.extend('/ ' * count)
    elif coldiff == 0:
        shift_interline.extend(echars[(idx + 1) * 2:ncols * 2])
    else:
        shift_interline.extend(r'\ ' * count)

    # draw edges from the current node to its parents
    _drawedges(echars, edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(_getpaddingline(echars, idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    extra_interline = echars[:(ncols + coldiff) * 2]
    if len(lines) < len(text):
        while len(lines) < len(text):
            lines.append(extra_interline[:])

    _drawendinglines(lines, extra_interline, edgemap, seen)

    while len(text) < len(lines):
        text.append("")

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over
    state['lastcoldiff'] = coldiff
    state['lastindex'] = idx
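
# Minimal driver sketch (illustrative only, not the actual caller; similar
# wiring lives in Mercurial's cmdutil). 'showfn' is a made-up helper that
# renders one changeset into a list of text lines:
#
#   state = asciistate()
#   for rev, _type, ctx, parents in dagwalker(repo, revs):
#       lines = showfn(ctx)
#       for _type, char, lines, coldata in asciiedges(_type, 'o', lines,
#                                                     state, rev, parents):
#           ascii(repo.ui, state, _type, char, lines, coldata)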