# Revision graph generator for Mercurial
#
# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
|
8 | |||
"""supports walking the history as DAGs suitable for graphical output

The most basic format we use is that of::

    (id, type, data, [parentids])

The node and parent ids are arbitrary integers which identify a node in the
context of the graph returned. Type is a constant specifying the node type.
Data depends on type.
"""
from __future__ import absolute_import

from .node import nullrev
from .thirdparty import attr
from . import (
    dagop,
    pycompat,
    smartset,
    util,
)
# Node-type markers: each graph entry is tagged with one of these constants.
CHANGESET = b'C'
PARENT = b'P'
GRANDPARENT = b'G'
MISSINGPARENT = b'M'
# Style of line to draw. None signals a line that ends and is removed at this
# point. A number prefix means only the last N characters of the current block
# will use that style, the rest will use the PARENT style. Add a - sign
# (so making N negative) and all but the first N characters use that style.
EDGES = {PARENT: b'|', GRANDPARENT: b':', MISSINGPARENT: None}
def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples

    This generator function walks through revisions (which should be ordered
    from bigger to lower). It returns a tuple for each node.

    Each parentinfo entry is a tuple with (edgetype, parentid), where edgetype
    is one of PARENT, GRANDPARENT or MISSINGPARENT. The node and parent ids
    are arbitrary integers which identify a node in the context of the graph
    returned.

    """
    # Cache of missing-parent -> reachable-roots lookups; reachableroots()
    # walks all revs, so recomputing it per node would be quadratic.
    gpcache = {}

    for rev in revs:
        ctx = repo[rev]
        # partition into parents in the rev set and missing parents, then
        # augment the lists with markers, to inform graph drawing code about
        # what kind of edge to draw between nodes.
        pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
        mpars = [
            p.rev()
            for p in ctx.parents()
            if p.rev() != nullrev and p.rev() not in pset
        ]
        parents = [(PARENT, p) for p in sorted(pset)]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                # precompute slow query as we know reachableroots() goes
                # through all revs (issue4782)
                if not isinstance(revs, smartset.baseset):
                    revs = smartset.baseset(revs)
                gp = gpcache[mpar] = sorted(
                    set(dagop.reachableroots(repo, revs, [mpar]))
                )
            if not gp:
                # no displayed ancestor: draw a dangling (missing) edge
                parents.append((MISSINGPARENT, mpar))
                pset.add(mpar)
            else:
                # skip displayed ancestors to indirect (grandparent) edges
                parents.extend((GRANDPARENT, g) for g in gp if g not in pset)
                pset.update(gp)

        yield (ctx.rev(), CHANGESET, ctx, parents)
def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        # only keep edges to parents that are themselves displayed
        parents = set(
            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
        )
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

        (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.
    """
    seen = []
    colors = {}
    newcolor = 1
    config = {}

    # read per-branch width/color overrides from the [graph] config section
    for key, val in repo.ui.configitems(b'graph'):
        if b'.' in key:
            branch, setting = key.rsplit(b'.', 1)
            # Validation
            if setting == b"width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == b"color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        # branch lookup is comparatively expensive; memoize per rev
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {})
        )
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next
        if cur not in seen:
            seen.append(cur)  # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        next = seen[:]

        # Add parents to next
        addparents = [p for pt, p in parents if p not in next]
        next[col : col + 1] = addparents

        # Set colors for the parents
        for i, p in enumerate(addparents):
            if not i:
                # first parent inherits the current node's color
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in next:
                bconf = getconf(eid)
                edges.append(
                    (
                        ecol,
                        next.index(eid),
                        colors[eid],
                        bconf.get(b'width', -1),
                        bconf.get(b'color', b''),
                    )
                )
            elif eid == cur:
                for ptype, p in parents:
                    bconf = getconf(p)
                    edges.append(
                        (
                            ecol,
                            next.index(p),
                            color,
                            bconf.get(b'width', -1),
                            bconf.get(b'color', b''),
                        )
                    )

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = next
def asciiedges(type, char, state, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()"""
    seen = state.seen
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    knownparents = []
    newparents = []
    for ptype, parent in parents:
        if parent == rev:
            # self reference (should only be seen in null rev)
            continue
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)
            # remember which edge style (|, :, ending) this parent uses
            state.edges[parent] = state.styles.get(ptype, b'|')

    ncols = len(seen)
    width = 1 + ncols * 2
    nextseen = seen[:]
    nextseen[nodeidx : nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p)) for p in knownparents]

    seen[:] = nextseen
    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        width += 2
        yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
        char = b'\\'
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    if nmorecols > 0:
        width += 2
    # remove current node from edge characters, no longer needed
    state.edges.pop(rev, None)
    yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
247 | def _fixlongrightedges(edges): |
|
248 | def _fixlongrightedges(edges): | |
248 | for (i, (start, end)) in enumerate(edges): |
|
249 | for (i, (start, end)) in enumerate(edges): | |
249 | if end > start: |
|
250 | if end > start: | |
250 | edges[i] = (start, end + 1) |
|
251 | edges[i] = (start, end + 1) | |
251 |
|
252 | |||
252 |
|
253 | |||
253 | def _getnodelineedgestail(echars, idx, pidx, ncols, coldiff, pdiff, fix_tail): |
|
254 | def _getnodelineedgestail(echars, idx, pidx, ncols, coldiff, pdiff, fix_tail): | |
254 | if fix_tail and coldiff == pdiff and coldiff != 0: |
|
255 | if fix_tail and coldiff == pdiff and coldiff != 0: | |
255 | # Still going in the same non-vertical direction. |
|
256 | # Still going in the same non-vertical direction. | |
256 | if coldiff == -1: |
|
257 | if coldiff == -1: | |
257 | start = max(idx + 1, pidx) |
|
258 | start = max(idx + 1, pidx) | |
258 | tail = echars[idx * 2 : (start - 1) * 2] |
|
259 | tail = echars[idx * 2 : (start - 1) * 2] | |
259 | tail.extend([b"/", b" "] * (ncols - start)) |
|
260 | tail.extend([b"/", b" "] * (ncols - start)) | |
260 | return tail |
|
261 | return tail | |
261 | else: |
|
262 | else: | |
262 | return [b"\\", b" "] * (ncols - idx - 1) |
|
263 | return [b"\\", b" "] * (ncols - idx - 1) | |
263 | else: |
|
264 | else: | |
264 | remainder = ncols - idx - 1 |
|
265 | remainder = ncols - idx - 1 | |
265 | return echars[-(remainder * 2) :] if remainder > 0 else [] |
|
266 | return echars[-(remainder * 2) :] if remainder > 0 else [] | |
266 |
|
267 | |||
267 |
|
268 | |||
268 | def _drawedges(echars, edges, nodeline, interline): |
|
269 | def _drawedges(echars, edges, nodeline, interline): | |
269 | for (start, end) in edges: |
|
270 | for (start, end) in edges: | |
270 | if start == end + 1: |
|
271 | if start == end + 1: | |
271 | interline[2 * end + 1] = b"/" |
|
272 | interline[2 * end + 1] = b"/" | |
272 | elif start == end - 1: |
|
273 | elif start == end - 1: | |
273 | interline[2 * start + 1] = b"\\" |
|
274 | interline[2 * start + 1] = b"\\" | |
274 | elif start == end: |
|
275 | elif start == end: | |
275 | interline[2 * start] = echars[2 * start] |
|
276 | interline[2 * start] = echars[2 * start] | |
276 | else: |
|
277 | else: | |
277 | if 2 * end >= len(nodeline): |
|
278 | if 2 * end >= len(nodeline): | |
278 | continue |
|
279 | continue | |
279 | nodeline[2 * end] = b"+" |
|
280 | nodeline[2 * end] = b"+" | |
280 | if start > end: |
|
281 | if start > end: | |
281 | (start, end) = (end, start) |
|
282 | (start, end) = (end, start) | |
282 | for i in range(2 * start + 1, 2 * end): |
|
283 | for i in range(2 * start + 1, 2 * end): | |
283 | if nodeline[i] != b"+": |
|
284 | if nodeline[i] != b"+": | |
284 | nodeline[i] = b"-" |
|
285 | nodeline[i] = b"-" | |
285 |
|
286 | |||
286 |
|
287 | |||
287 | def _getpaddingline(echars, idx, ncols, edges): |
|
288 | def _getpaddingline(echars, idx, ncols, edges): | |
288 | # all edges up to the current node |
|
289 | # all edges up to the current node | |
289 | line = echars[: idx * 2] |
|
290 | line = echars[: idx * 2] | |
290 | # an edge for the current node, if there is one |
|
291 | # an edge for the current node, if there is one | |
291 | if (idx, idx - 1) in edges or (idx, idx) in edges: |
|
292 | if (idx, idx - 1) in edges or (idx, idx) in edges: | |
292 | # (idx, idx - 1) (idx, idx) |
|
293 | # (idx, idx - 1) (idx, idx) | |
293 | # | | | | | | | | |
|
294 | # | | | | | | | | | |
294 | # +---o | | o---+ |
|
295 | # +---o | | o---+ | |
295 | # | | X | | X | | |
|
296 | # | | X | | X | | | |
296 | # | |/ / | |/ / |
|
297 | # | |/ / | |/ / | |
297 | # | | | | | | |
|
298 | # | | | | | | | |
298 | line.extend(echars[idx * 2 : (idx + 1) * 2]) |
|
299 | line.extend(echars[idx * 2 : (idx + 1) * 2]) | |
299 | else: |
|
300 | else: | |
300 | line.extend([b' ', b' ']) |
|
301 | line.extend([b' ', b' ']) | |
301 | # all edges to the right of the current node |
|
302 | # all edges to the right of the current node | |
302 | remainder = ncols - idx - 1 |
|
303 | remainder = ncols - idx - 1 | |
303 | if remainder > 0: |
|
304 | if remainder > 0: | |
304 | line.extend(echars[-(remainder * 2) :]) |
|
305 | line.extend(echars[-(remainder * 2) :]) | |
305 | return line |
|
306 | return line | |
306 |
|
307 | |||
307 |
|
308 | |||
308 | def _drawendinglines(lines, extra, edgemap, seen, state): |
|
309 | def _drawendinglines(lines, extra, edgemap, seen, state): | |
309 | """Draw ending lines for missing parent edges |
|
310 | """Draw ending lines for missing parent edges | |
310 |
|
311 | |||
311 | None indicates an edge that ends at between this node and the next |
|
312 | None indicates an edge that ends at between this node and the next | |
312 | Replace with a short line ending in ~ and add / lines to any edges to |
|
313 | Replace with a short line ending in ~ and add / lines to any edges to | |
313 | the right. |
|
314 | the right. | |
314 |
|
315 | |||
315 | """ |
|
316 | """ | |
316 | if None not in edgemap.values(): |
|
317 | if None not in edgemap.values(): | |
317 | return |
|
318 | return | |
318 |
|
319 | |||
319 | # Check for more edges to the right of our ending edges. |
|
320 | # Check for more edges to the right of our ending edges. | |
320 | # We need enough space to draw adjustment lines for these. |
|
321 | # We need enough space to draw adjustment lines for these. | |
321 | edgechars = extra[::2] |
|
322 | edgechars = extra[::2] | |
322 | while edgechars and edgechars[-1] is None: |
|
323 | while edgechars and edgechars[-1] is None: | |
323 | edgechars.pop() |
|
324 | edgechars.pop() | |
324 | shift_size = max((edgechars.count(None) * 2) - 1, 0) |
|
325 | shift_size = max((edgechars.count(None) * 2) - 1, 0) | |
325 |
minlines = 3 if not state |
|
326 | minlines = 3 if not state.graphshorten else 2 | |
326 | while len(lines) < minlines + shift_size: |
|
327 | while len(lines) < minlines + shift_size: | |
327 | lines.append(extra[:]) |
|
328 | lines.append(extra[:]) | |
328 |
|
329 | |||
329 | if shift_size: |
|
330 | if shift_size: | |
330 | empties = [] |
|
331 | empties = [] | |
331 | toshift = [] |
|
332 | toshift = [] | |
332 | first_empty = extra.index(None) |
|
333 | first_empty = extra.index(None) | |
333 | for i, c in enumerate(extra[first_empty::2], first_empty // 2): |
|
334 | for i, c in enumerate(extra[first_empty::2], first_empty // 2): | |
334 | if c is None: |
|
335 | if c is None: | |
335 | empties.append(i * 2) |
|
336 | empties.append(i * 2) | |
336 | else: |
|
337 | else: | |
337 | toshift.append(i * 2) |
|
338 | toshift.append(i * 2) | |
338 | targets = list(range(first_empty, first_empty + len(toshift) * 2, 2)) |
|
339 | targets = list(range(first_empty, first_empty + len(toshift) * 2, 2)) | |
339 | positions = toshift[:] |
|
340 | positions = toshift[:] | |
340 | for line in lines[-shift_size:]: |
|
341 | for line in lines[-shift_size:]: | |
341 | line[first_empty:] = [b' '] * (len(line) - first_empty) |
|
342 | line[first_empty:] = [b' '] * (len(line) - first_empty) | |
342 | for i in range(len(positions)): |
|
343 | for i in range(len(positions)): | |
343 | pos = positions[i] - 1 |
|
344 | pos = positions[i] - 1 | |
344 | positions[i] = max(pos, targets[i]) |
|
345 | positions[i] = max(pos, targets[i]) | |
345 | line[pos] = b'/' if pos > targets[i] else extra[toshift[i]] |
|
346 | line[pos] = b'/' if pos > targets[i] else extra[toshift[i]] | |
346 |
|
347 | |||
347 |
map = {1: b'|', 2: b'~'} if not state |
|
348 | map = {1: b'|', 2: b'~'} if not state.graphshorten else {1: b'~'} | |
348 | for i, line in enumerate(lines): |
|
349 | for i, line in enumerate(lines): | |
349 | if None not in line: |
|
350 | if None not in line: | |
350 | continue |
|
351 | continue | |
351 | line[:] = [c or map.get(i, b' ') for c in line] |
|
352 | line[:] = [c or map.get(i, b' ') for c in line] | |
352 |
|
353 | |||
353 | # remove edges that ended |
|
354 | # remove edges that ended | |
354 | remove = [p for p, c in edgemap.items() if c is None] |
|
355 | remove = [p for p, c in edgemap.items() if c is None] | |
355 | for parent in remove: |
|
356 | for parent in remove: | |
356 | del edgemap[parent] |
|
357 | del edgemap[parent] | |
357 | seen.remove(parent) |
|
358 | seen.remove(parent) | |
358 |
|
359 | |||
359 |
|
360 | |||
360 | def asciistate(): |
|
361 | @attr.s | |
361 | """returns the initial value for the "state" argument to ascii()""" |
|
362 | class asciistate(object): | |
362 | return { |
|
363 | """State of ascii() graph rendering""" | |
363 | b'seen': [], |
|
364 | ||
364 | b'edges': {}, |
|
365 | seen = attr.ib(init=False, default=attr.Factory(list)) | |
365 | b'lastcoldiff': 0, |
|
366 | edges = attr.ib(init=False, default=attr.Factory(dict)) | |
366 | b'lastindex': 0, |
|
367 | lastcoldiff = attr.ib(init=False, default=0) | |
367 | b'styles': EDGES.copy(), |
|
368 | lastindex = attr.ib(init=False, default=0) | |
368 | b'graphshorten': False, |
|
369 | styles = attr.ib(init=False, default=attr.Factory(EDGES.copy)) | |
369 | } |
|
370 | graphshorten = attr.ib(init=False, default=False) | |
370 |
|
371 | |||
371 |
|
372 | |||
def outputgraph(ui, graph):
    """outputs an ASCII graph of a DAG

    this is a helper function for 'ascii' below.

    takes the following arguments:

    - ui to write to
    - graph data: list of { graph nodes/edges, text }

    this function can be monkey-patched by extensions to alter graph display
    without needing to mimic all of the edge-fixup logic in ascii()
    """
    for (ln, logstr) in graph:
        ui.write((ln + logstr).rstrip() + b"\n")
389 | def ascii(ui, state, type, char, text, coldata): |
|
390 | def ascii(ui, state, type, char, text, coldata): | |
390 | """prints an ASCII graph of the DAG |
|
391 | """prints an ASCII graph of the DAG | |
391 |
|
392 | |||
392 | takes the following arguments (one call per node in the graph): |
|
393 | takes the following arguments (one call per node in the graph): | |
393 |
|
394 | |||
394 | - ui to write to |
|
395 | - ui to write to | |
395 | - Somewhere to keep the needed state in (init to asciistate()) |
|
396 | - Somewhere to keep the needed state in (init to asciistate()) | |
396 | - Column of the current node in the set of ongoing edges. |
|
397 | - Column of the current node in the set of ongoing edges. | |
397 | - Type indicator of node data, usually 'C' for changesets. |
|
398 | - Type indicator of node data, usually 'C' for changesets. | |
398 | - Payload: (char, lines): |
|
399 | - Payload: (char, lines): | |
399 | - Character to use as node's symbol. |
|
400 | - Character to use as node's symbol. | |
400 | - List of lines to display as the node's text. |
|
401 | - List of lines to display as the node's text. | |
401 | - Edges; a list of (col, next_col) indicating the edges between |
|
402 | - Edges; a list of (col, next_col) indicating the edges between | |
402 | the current node and its parents. |
|
403 | the current node and its parents. | |
403 | - Number of columns (ongoing edges) in the current revision. |
|
404 | - Number of columns (ongoing edges) in the current revision. | |
404 | - The difference between the number of columns (ongoing edges) |
|
405 | - The difference between the number of columns (ongoing edges) | |
405 | in the next revision and the number of columns (ongoing edges) |
|
406 | in the next revision and the number of columns (ongoing edges) | |
406 | in the current revision. That is: -1 means one column removed; |
|
407 | in the current revision. That is: -1 means one column removed; | |
407 | 0 means no columns added or removed; 1 means one column added. |
|
408 | 0 means no columns added or removed; 1 means one column added. | |
408 | """ |
|
409 | """ | |
409 | idx, edges, ncols, coldiff = coldata |
|
410 | idx, edges, ncols, coldiff = coldata | |
410 | assert -2 < coldiff < 2 |
|
411 | assert -2 < coldiff < 2 | |
411 |
|
412 | |||
412 |
edgemap, seen = state |
|
413 | edgemap, seen = state.edges, state.seen | |
413 | # Be tolerant of history issues; make sure we have at least ncols + coldiff |
|
414 | # Be tolerant of history issues; make sure we have at least ncols + coldiff | |
414 | # elements to work with. See test-glog.t for broken history test cases. |
|
415 | # elements to work with. See test-glog.t for broken history test cases. | |
415 | echars = [c for p in seen for c in (edgemap.get(p, b'|'), b' ')] |
|
416 | echars = [c for p in seen for c in (edgemap.get(p, b'|'), b' ')] | |
416 | echars.extend((b'|', b' ') * max(ncols + coldiff - len(seen), 0)) |
|
417 | echars.extend((b'|', b' ') * max(ncols + coldiff - len(seen), 0)) | |
417 |
|
418 | |||
418 | if coldiff == -1: |
|
419 | if coldiff == -1: | |
419 | # Transform |
|
420 | # Transform | |
420 | # |
|
421 | # | |
421 | # | | | | | | |
|
422 | # | | | | | | | |
422 | # o | | into o---+ |
|
423 | # o | | into o---+ | |
423 | # |X / |/ / |
|
424 | # |X / |/ / | |
424 | # | | | | |
|
425 | # | | | | | |
425 | _fixlongrightedges(edges) |
|
426 | _fixlongrightedges(edges) | |
426 |
|
427 | |||
427 | # add_padding_line says whether to rewrite |
|
428 | # add_padding_line says whether to rewrite | |
428 | # |
|
429 | # | |
429 | # | | | | | | | | |
|
430 | # | | | | | | | | | |
430 | # | o---+ into | o---+ |
|
431 | # | o---+ into | o---+ | |
431 | # | / / | | | # <--- padding line |
|
432 | # | / / | | | # <--- padding line | |
432 | # o | | | / / |
|
433 | # o | | | / / | |
433 | # o | | |
|
434 | # o | | | |
434 | add_padding_line = ( |
|
435 | add_padding_line = ( | |
435 | len(text) > 2 and coldiff == -1 and [x for (x, y) in edges if x + 1 < y] |
|
436 | len(text) > 2 and coldiff == -1 and [x for (x, y) in edges if x + 1 < y] | |
436 | ) |
|
437 | ) | |
437 |
|
438 | |||
438 | # fix_nodeline_tail says whether to rewrite |
|
439 | # fix_nodeline_tail says whether to rewrite | |
439 | # |
|
440 | # | |
440 | # | | o | | | | o | | |
|
441 | # | | o | | | | o | | | |
441 | # | | |/ / | | |/ / |
|
442 | # | | |/ / | | |/ / | |
442 | # | o | | into | o / / # <--- fixed nodeline tail |
|
443 | # | o | | into | o / / # <--- fixed nodeline tail | |
443 | # | |/ / | |/ / |
|
444 | # | |/ / | |/ / | |
444 | # o | | o | | |
|
445 | # o | | o | | | |
445 | fix_nodeline_tail = len(text) <= 2 and not add_padding_line |
|
446 | fix_nodeline_tail = len(text) <= 2 and not add_padding_line | |
446 |
|
447 | |||
447 | # nodeline is the line containing the node character (typically o) |
|
448 | # nodeline is the line containing the node character (typically o) | |
448 | nodeline = echars[: idx * 2] |
|
449 | nodeline = echars[: idx * 2] | |
449 | nodeline.extend([char, b" "]) |
|
450 | nodeline.extend([char, b" "]) | |
450 |
|
451 | |||
451 | nodeline.extend( |
|
452 | nodeline.extend( | |
452 | _getnodelineedgestail( |
|
453 | _getnodelineedgestail( | |
453 | echars, |
|
454 | echars, | |
454 | idx, |
|
455 | idx, | |
455 |
state |
|
456 | state.lastindex, | |
456 | ncols, |
|
457 | ncols, | |
457 | coldiff, |
|
458 | coldiff, | |
458 |
state |
|
459 | state.lastcoldiff, | |
459 | fix_nodeline_tail, |
|
460 | fix_nodeline_tail, | |
460 | ) |
|
461 | ) | |
461 | ) |
|
462 | ) | |
462 |
|
463 | |||
463 | # shift_interline is the line containing the non-vertical |
|
464 | # shift_interline is the line containing the non-vertical | |
464 | # edges between this entry and the next |
|
465 | # edges between this entry and the next | |
465 | shift_interline = echars[: idx * 2] |
|
466 | shift_interline = echars[: idx * 2] | |
466 | for i in pycompat.xrange(2 + coldiff): |
|
467 | for i in pycompat.xrange(2 + coldiff): | |
467 | shift_interline.append(b' ') |
|
468 | shift_interline.append(b' ') | |
468 | count = ncols - idx - 1 |
|
469 | count = ncols - idx - 1 | |
469 | if coldiff == -1: |
|
470 | if coldiff == -1: | |
470 | for i in pycompat.xrange(count): |
|
471 | for i in pycompat.xrange(count): | |
471 | shift_interline.extend([b'/', b' ']) |
|
472 | shift_interline.extend([b'/', b' ']) | |
472 | elif coldiff == 0: |
|
473 | elif coldiff == 0: | |
473 | shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2]) |
|
474 | shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2]) | |
474 | else: |
|
475 | else: | |
475 | for i in pycompat.xrange(count): |
|
476 | for i in pycompat.xrange(count): | |
476 | shift_interline.extend([b'\\', b' ']) |
|
477 | shift_interline.extend([b'\\', b' ']) | |
477 |
|
478 | |||
478 | # draw edges from the current node to its parents |
|
479 | # draw edges from the current node to its parents | |
479 | _drawedges(echars, edges, nodeline, shift_interline) |
|
480 | _drawedges(echars, edges, nodeline, shift_interline) | |
480 |
|
481 | |||
481 | # lines is the list of all graph lines to print |
|
482 | # lines is the list of all graph lines to print | |
482 | lines = [nodeline] |
|
483 | lines = [nodeline] | |
483 | if add_padding_line: |
|
484 | if add_padding_line: | |
484 | lines.append(_getpaddingline(echars, idx, ncols, edges)) |
|
485 | lines.append(_getpaddingline(echars, idx, ncols, edges)) | |
485 |
|
486 | |||
486 | # If 'graphshorten' config, only draw shift_interline |
|
487 | # If 'graphshorten' config, only draw shift_interline | |
487 | # when there is any non vertical flow in graph. |
|
488 | # when there is any non vertical flow in graph. | |
488 |
if state |
|
489 | if state.graphshorten: | |
489 | if any(c in br'\/' for c in shift_interline if c): |
|
490 | if any(c in br'\/' for c in shift_interline if c): | |
490 | lines.append(shift_interline) |
|
491 | lines.append(shift_interline) | |
491 | # Else, no 'graphshorten' config so draw shift_interline. |
|
492 | # Else, no 'graphshorten' config so draw shift_interline. | |
492 | else: |
|
493 | else: | |
493 | lines.append(shift_interline) |
|
494 | lines.append(shift_interline) | |
494 |
|
495 | |||
495 | # make sure that there are as many graph lines as there are |
|
496 | # make sure that there are as many graph lines as there are | |
496 | # log strings |
|
497 | # log strings | |
497 | extra_interline = echars[: (ncols + coldiff) * 2] |
|
498 | extra_interline = echars[: (ncols + coldiff) * 2] | |
498 | if len(lines) < len(text): |
|
499 | if len(lines) < len(text): | |
499 | while len(lines) < len(text): |
|
500 | while len(lines) < len(text): | |
500 | lines.append(extra_interline[:]) |
|
501 | lines.append(extra_interline[:]) | |
501 |
|
502 | |||
502 | _drawendinglines(lines, extra_interline, edgemap, seen, state) |
|
503 | _drawendinglines(lines, extra_interline, edgemap, seen, state) | |
503 |
|
504 | |||
504 | while len(text) < len(lines): |
|
505 | while len(text) < len(lines): | |
505 | text.append(b"") |
|
506 | text.append(b"") | |
506 |
|
507 | |||
507 | # print lines |
|
508 | # print lines | |
508 | indentation_level = max(ncols, ncols + coldiff) |
|
509 | indentation_level = max(ncols, ncols + coldiff) | |
509 | lines = [ |
|
510 | lines = [ | |
510 | b"%-*s " % (2 * indentation_level, b"".join(line)) for line in lines |
|
511 | b"%-*s " % (2 * indentation_level, b"".join(line)) for line in lines | |
511 | ] |
|
512 | ] | |
512 | outputgraph(ui, zip(lines, text)) |
|
513 | outputgraph(ui, zip(lines, text)) | |
513 |
|
514 | |||
514 | # ... and start over |
|
515 | # ... and start over | |
515 |
state |
|
516 | state.lastcoldiff = coldiff | |
516 |
state |
|
517 | state.lastindex = idx |
@@ -1,1085 +1,1085 | |||||
1 | # logcmdutil.py - utility for log-like commands |
|
1 | # logcmdutil.py - utility for log-like commands | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import itertools |
|
10 | import itertools | |
11 | import os |
|
11 | import os | |
12 | import posixpath |
|
12 | import posixpath | |
13 |
|
13 | |||
14 | from .i18n import _ |
|
14 | from .i18n import _ | |
15 | from .node import ( |
|
15 | from .node import ( | |
16 | nullid, |
|
16 | nullid, | |
17 | wdirid, |
|
17 | wdirid, | |
18 | wdirrev, |
|
18 | wdirrev, | |
19 | ) |
|
19 | ) | |
20 |
|
20 | |||
21 | from . import ( |
|
21 | from . import ( | |
22 | dagop, |
|
22 | dagop, | |
23 | error, |
|
23 | error, | |
24 | formatter, |
|
24 | formatter, | |
25 | graphmod, |
|
25 | graphmod, | |
26 | match as matchmod, |
|
26 | match as matchmod, | |
27 | mdiff, |
|
27 | mdiff, | |
28 | patch, |
|
28 | patch, | |
29 | pathutil, |
|
29 | pathutil, | |
30 | pycompat, |
|
30 | pycompat, | |
31 | revset, |
|
31 | revset, | |
32 | revsetlang, |
|
32 | revsetlang, | |
33 | scmutil, |
|
33 | scmutil, | |
34 | smartset, |
|
34 | smartset, | |
35 | templatekw, |
|
35 | templatekw, | |
36 | templater, |
|
36 | templater, | |
37 | util, |
|
37 | util, | |
38 | ) |
|
38 | ) | |
39 | from .utils import ( |
|
39 | from .utils import ( | |
40 | dateutil, |
|
40 | dateutil, | |
41 | stringutil, |
|
41 | stringutil, | |
42 | ) |
|
42 | ) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | if pycompat.TYPE_CHECKING: |
|
45 | if pycompat.TYPE_CHECKING: | |
46 | from typing import ( |
|
46 | from typing import ( | |
47 | Any, |
|
47 | Any, | |
48 | Optional, |
|
48 | Optional, | |
49 | Tuple, |
|
49 | Tuple, | |
50 | ) |
|
50 | ) | |
51 |
|
51 | |||
52 | for t in (Any, Optional, Tuple): |
|
52 | for t in (Any, Optional, Tuple): | |
53 | assert t |
|
53 | assert t | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def getlimit(opts): |
|
56 | def getlimit(opts): | |
57 | """get the log limit according to option -l/--limit""" |
|
57 | """get the log limit according to option -l/--limit""" | |
58 | limit = opts.get(b'limit') |
|
58 | limit = opts.get(b'limit') | |
59 | if limit: |
|
59 | if limit: | |
60 | try: |
|
60 | try: | |
61 | limit = int(limit) |
|
61 | limit = int(limit) | |
62 | except ValueError: |
|
62 | except ValueError: | |
63 | raise error.Abort(_(b'limit must be a positive integer')) |
|
63 | raise error.Abort(_(b'limit must be a positive integer')) | |
64 | if limit <= 0: |
|
64 | if limit <= 0: | |
65 | raise error.Abort(_(b'limit must be positive')) |
|
65 | raise error.Abort(_(b'limit must be positive')) | |
66 | else: |
|
66 | else: | |
67 | limit = None |
|
67 | limit = None | |
68 | return limit |
|
68 | return limit | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def diffordiffstat( |
|
71 | def diffordiffstat( | |
72 | ui, |
|
72 | ui, | |
73 | repo, |
|
73 | repo, | |
74 | diffopts, |
|
74 | diffopts, | |
75 | node1, |
|
75 | node1, | |
76 | node2, |
|
76 | node2, | |
77 | match, |
|
77 | match, | |
78 | changes=None, |
|
78 | changes=None, | |
79 | stat=False, |
|
79 | stat=False, | |
80 | fp=None, |
|
80 | fp=None, | |
81 | graphwidth=0, |
|
81 | graphwidth=0, | |
82 | prefix=b'', |
|
82 | prefix=b'', | |
83 | root=b'', |
|
83 | root=b'', | |
84 | listsubrepos=False, |
|
84 | listsubrepos=False, | |
85 | hunksfilterfn=None, |
|
85 | hunksfilterfn=None, | |
86 | ): |
|
86 | ): | |
87 | '''show diff or diffstat.''' |
|
87 | '''show diff or diffstat.''' | |
88 | ctx1 = repo[node1] |
|
88 | ctx1 = repo[node1] | |
89 | ctx2 = repo[node2] |
|
89 | ctx2 = repo[node2] | |
90 | if root: |
|
90 | if root: | |
91 | relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) |
|
91 | relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) | |
92 | else: |
|
92 | else: | |
93 | relroot = b'' |
|
93 | relroot = b'' | |
94 | copysourcematch = None |
|
94 | copysourcematch = None | |
95 |
|
95 | |||
96 | def compose(f, g): |
|
96 | def compose(f, g): | |
97 | return lambda x: f(g(x)) |
|
97 | return lambda x: f(g(x)) | |
98 |
|
98 | |||
99 | def pathfn(f): |
|
99 | def pathfn(f): | |
100 | return posixpath.join(prefix, f) |
|
100 | return posixpath.join(prefix, f) | |
101 |
|
101 | |||
102 | if relroot != b'': |
|
102 | if relroot != b'': | |
103 | # XXX relative roots currently don't work if the root is within a |
|
103 | # XXX relative roots currently don't work if the root is within a | |
104 | # subrepo |
|
104 | # subrepo | |
105 | uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) |
|
105 | uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True) | |
106 | uirelroot = uipathfn(pathfn(relroot)) |
|
106 | uirelroot = uipathfn(pathfn(relroot)) | |
107 | relroot += b'/' |
|
107 | relroot += b'/' | |
108 | for matchroot in match.files(): |
|
108 | for matchroot in match.files(): | |
109 | if not matchroot.startswith(relroot): |
|
109 | if not matchroot.startswith(relroot): | |
110 | ui.warn( |
|
110 | ui.warn( | |
111 | _(b'warning: %s not inside relative root %s\n') |
|
111 | _(b'warning: %s not inside relative root %s\n') | |
112 | % (uipathfn(pathfn(matchroot)), uirelroot) |
|
112 | % (uipathfn(pathfn(matchroot)), uirelroot) | |
113 | ) |
|
113 | ) | |
114 |
|
114 | |||
115 | relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path') |
|
115 | relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path') | |
116 | match = matchmod.intersectmatchers(match, relrootmatch) |
|
116 | match = matchmod.intersectmatchers(match, relrootmatch) | |
117 | copysourcematch = relrootmatch |
|
117 | copysourcematch = relrootmatch | |
118 |
|
118 | |||
119 | checkroot = repo.ui.configbool( |
|
119 | checkroot = repo.ui.configbool( | |
120 | b'devel', b'all-warnings' |
|
120 | b'devel', b'all-warnings' | |
121 | ) or repo.ui.configbool(b'devel', b'check-relroot') |
|
121 | ) or repo.ui.configbool(b'devel', b'check-relroot') | |
122 |
|
122 | |||
123 | def relrootpathfn(f): |
|
123 | def relrootpathfn(f): | |
124 | if checkroot and not f.startswith(relroot): |
|
124 | if checkroot and not f.startswith(relroot): | |
125 | raise AssertionError( |
|
125 | raise AssertionError( | |
126 | b"file %s doesn't start with relroot %s" % (f, relroot) |
|
126 | b"file %s doesn't start with relroot %s" % (f, relroot) | |
127 | ) |
|
127 | ) | |
128 | return f[len(relroot) :] |
|
128 | return f[len(relroot) :] | |
129 |
|
129 | |||
130 | pathfn = compose(relrootpathfn, pathfn) |
|
130 | pathfn = compose(relrootpathfn, pathfn) | |
131 |
|
131 | |||
132 | if stat: |
|
132 | if stat: | |
133 | diffopts = diffopts.copy(context=0, noprefix=False) |
|
133 | diffopts = diffopts.copy(context=0, noprefix=False) | |
134 | width = 80 |
|
134 | width = 80 | |
135 | if not ui.plain(): |
|
135 | if not ui.plain(): | |
136 | width = ui.termwidth() - graphwidth |
|
136 | width = ui.termwidth() - graphwidth | |
137 | # If an explicit --root was given, don't respect ui.relative-paths |
|
137 | # If an explicit --root was given, don't respect ui.relative-paths | |
138 | if not relroot: |
|
138 | if not relroot: | |
139 | pathfn = compose(scmutil.getuipathfn(repo), pathfn) |
|
139 | pathfn = compose(scmutil.getuipathfn(repo), pathfn) | |
140 |
|
140 | |||
141 | chunks = ctx2.diff( |
|
141 | chunks = ctx2.diff( | |
142 | ctx1, |
|
142 | ctx1, | |
143 | match, |
|
143 | match, | |
144 | changes, |
|
144 | changes, | |
145 | opts=diffopts, |
|
145 | opts=diffopts, | |
146 | pathfn=pathfn, |
|
146 | pathfn=pathfn, | |
147 | copysourcematch=copysourcematch, |
|
147 | copysourcematch=copysourcematch, | |
148 | hunksfilterfn=hunksfilterfn, |
|
148 | hunksfilterfn=hunksfilterfn, | |
149 | ) |
|
149 | ) | |
150 |
|
150 | |||
151 | if fp is not None or ui.canwritewithoutlabels(): |
|
151 | if fp is not None or ui.canwritewithoutlabels(): | |
152 | out = fp or ui |
|
152 | out = fp or ui | |
153 | if stat: |
|
153 | if stat: | |
154 | chunks = [patch.diffstat(util.iterlines(chunks), width=width)] |
|
154 | chunks = [patch.diffstat(util.iterlines(chunks), width=width)] | |
155 | for chunk in util.filechunkiter(util.chunkbuffer(chunks)): |
|
155 | for chunk in util.filechunkiter(util.chunkbuffer(chunks)): | |
156 | out.write(chunk) |
|
156 | out.write(chunk) | |
157 | else: |
|
157 | else: | |
158 | if stat: |
|
158 | if stat: | |
159 | chunks = patch.diffstatui(util.iterlines(chunks), width=width) |
|
159 | chunks = patch.diffstatui(util.iterlines(chunks), width=width) | |
160 | else: |
|
160 | else: | |
161 | chunks = patch.difflabel( |
|
161 | chunks = patch.difflabel( | |
162 | lambda chunks, **kwargs: chunks, chunks, opts=diffopts |
|
162 | lambda chunks, **kwargs: chunks, chunks, opts=diffopts | |
163 | ) |
|
163 | ) | |
164 | if ui.canbatchlabeledwrites(): |
|
164 | if ui.canbatchlabeledwrites(): | |
165 |
|
165 | |||
166 | def gen(): |
|
166 | def gen(): | |
167 | for chunk, label in chunks: |
|
167 | for chunk, label in chunks: | |
168 | yield ui.label(chunk, label=label) |
|
168 | yield ui.label(chunk, label=label) | |
169 |
|
169 | |||
170 | for chunk in util.filechunkiter(util.chunkbuffer(gen())): |
|
170 | for chunk in util.filechunkiter(util.chunkbuffer(gen())): | |
171 | ui.write(chunk) |
|
171 | ui.write(chunk) | |
172 | else: |
|
172 | else: | |
173 | for chunk, label in chunks: |
|
173 | for chunk, label in chunks: | |
174 | ui.write(chunk, label=label) |
|
174 | ui.write(chunk, label=label) | |
175 |
|
175 | |||
176 | for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): |
|
176 | for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): | |
177 | tempnode2 = node2 |
|
177 | tempnode2 = node2 | |
178 | try: |
|
178 | try: | |
179 | if node2 is not None: |
|
179 | if node2 is not None: | |
180 | tempnode2 = ctx2.substate[subpath][1] |
|
180 | tempnode2 = ctx2.substate[subpath][1] | |
181 | except KeyError: |
|
181 | except KeyError: | |
182 | # A subrepo that existed in node1 was deleted between node1 and |
|
182 | # A subrepo that existed in node1 was deleted between node1 and | |
183 | # node2 (inclusive). Thus, ctx2's substate won't contain that |
|
183 | # node2 (inclusive). Thus, ctx2's substate won't contain that | |
184 | # subpath. The best we can do is to ignore it. |
|
184 | # subpath. The best we can do is to ignore it. | |
185 | tempnode2 = None |
|
185 | tempnode2 = None | |
186 | submatch = matchmod.subdirmatcher(subpath, match) |
|
186 | submatch = matchmod.subdirmatcher(subpath, match) | |
187 | subprefix = repo.wvfs.reljoin(prefix, subpath) |
|
187 | subprefix = repo.wvfs.reljoin(prefix, subpath) | |
188 | if listsubrepos or match.exact(subpath) or any(submatch.files()): |
|
188 | if listsubrepos or match.exact(subpath) or any(submatch.files()): | |
189 | sub.diff( |
|
189 | sub.diff( | |
190 | ui, |
|
190 | ui, | |
191 | diffopts, |
|
191 | diffopts, | |
192 | tempnode2, |
|
192 | tempnode2, | |
193 | submatch, |
|
193 | submatch, | |
194 | changes=changes, |
|
194 | changes=changes, | |
195 | stat=stat, |
|
195 | stat=stat, | |
196 | fp=fp, |
|
196 | fp=fp, | |
197 | prefix=subprefix, |
|
197 | prefix=subprefix, | |
198 | ) |
|
198 | ) | |
199 |
|
199 | |||
200 |
|
200 | |||
201 | class changesetdiffer(object): |
|
201 | class changesetdiffer(object): | |
202 | """Generate diff of changeset with pre-configured filtering functions""" |
|
202 | """Generate diff of changeset with pre-configured filtering functions""" | |
203 |
|
203 | |||
204 | def _makefilematcher(self, ctx): |
|
204 | def _makefilematcher(self, ctx): | |
205 | return scmutil.matchall(ctx.repo()) |
|
205 | return scmutil.matchall(ctx.repo()) | |
206 |
|
206 | |||
207 | def _makehunksfilter(self, ctx): |
|
207 | def _makehunksfilter(self, ctx): | |
208 | return None |
|
208 | return None | |
209 |
|
209 | |||
210 | def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False): |
|
210 | def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False): | |
211 | repo = ctx.repo() |
|
211 | repo = ctx.repo() | |
212 | node = ctx.node() |
|
212 | node = ctx.node() | |
213 | prev = ctx.p1().node() |
|
213 | prev = ctx.p1().node() | |
214 | diffordiffstat( |
|
214 | diffordiffstat( | |
215 | ui, |
|
215 | ui, | |
216 | repo, |
|
216 | repo, | |
217 | diffopts, |
|
217 | diffopts, | |
218 | prev, |
|
218 | prev, | |
219 | node, |
|
219 | node, | |
220 | match=self._makefilematcher(ctx), |
|
220 | match=self._makefilematcher(ctx), | |
221 | stat=stat, |
|
221 | stat=stat, | |
222 | graphwidth=graphwidth, |
|
222 | graphwidth=graphwidth, | |
223 | hunksfilterfn=self._makehunksfilter(ctx), |
|
223 | hunksfilterfn=self._makehunksfilter(ctx), | |
224 | ) |
|
224 | ) | |
225 |
|
225 | |||
226 |
|
226 | |||
227 | def changesetlabels(ctx): |
|
227 | def changesetlabels(ctx): | |
228 | labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()] |
|
228 | labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()] | |
229 | if ctx.obsolete(): |
|
229 | if ctx.obsolete(): | |
230 | labels.append(b'changeset.obsolete') |
|
230 | labels.append(b'changeset.obsolete') | |
231 | if ctx.isunstable(): |
|
231 | if ctx.isunstable(): | |
232 | labels.append(b'changeset.unstable') |
|
232 | labels.append(b'changeset.unstable') | |
233 | for instability in ctx.instabilities(): |
|
233 | for instability in ctx.instabilities(): | |
234 | labels.append(b'instability.%s' % instability) |
|
234 | labels.append(b'instability.%s' % instability) | |
235 | return b' '.join(labels) |
|
235 | return b' '.join(labels) | |
236 |
|
236 | |||
237 |
|
237 | |||
238 | class changesetprinter(object): |
|
238 | class changesetprinter(object): | |
239 | '''show changeset information when templating not requested.''' |
|
239 | '''show changeset information when templating not requested.''' | |
240 |
|
240 | |||
241 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): |
|
241 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): | |
242 | self.ui = ui |
|
242 | self.ui = ui | |
243 | self.repo = repo |
|
243 | self.repo = repo | |
244 | self.buffered = buffered |
|
244 | self.buffered = buffered | |
245 | self._differ = differ or changesetdiffer() |
|
245 | self._differ = differ or changesetdiffer() | |
246 | self._diffopts = patch.diffallopts(ui, diffopts) |
|
246 | self._diffopts = patch.diffallopts(ui, diffopts) | |
247 | self._includestat = diffopts and diffopts.get(b'stat') |
|
247 | self._includestat = diffopts and diffopts.get(b'stat') | |
248 | self._includediff = diffopts and diffopts.get(b'patch') |
|
248 | self._includediff = diffopts and diffopts.get(b'patch') | |
249 | self.header = {} |
|
249 | self.header = {} | |
250 | self.hunk = {} |
|
250 | self.hunk = {} | |
251 | self.lastheader = None |
|
251 | self.lastheader = None | |
252 | self.footer = None |
|
252 | self.footer = None | |
253 | self._columns = templatekw.getlogcolumns() |
|
253 | self._columns = templatekw.getlogcolumns() | |
254 |
|
254 | |||
255 | def flush(self, ctx): |
|
255 | def flush(self, ctx): | |
256 | rev = ctx.rev() |
|
256 | rev = ctx.rev() | |
257 | if rev in self.header: |
|
257 | if rev in self.header: | |
258 | h = self.header[rev] |
|
258 | h = self.header[rev] | |
259 | if h != self.lastheader: |
|
259 | if h != self.lastheader: | |
260 | self.lastheader = h |
|
260 | self.lastheader = h | |
261 | self.ui.write(h) |
|
261 | self.ui.write(h) | |
262 | del self.header[rev] |
|
262 | del self.header[rev] | |
263 | if rev in self.hunk: |
|
263 | if rev in self.hunk: | |
264 | self.ui.write(self.hunk[rev]) |
|
264 | self.ui.write(self.hunk[rev]) | |
265 | del self.hunk[rev] |
|
265 | del self.hunk[rev] | |
266 |
|
266 | |||
267 | def close(self): |
|
267 | def close(self): | |
268 | if self.footer: |
|
268 | if self.footer: | |
269 | self.ui.write(self.footer) |
|
269 | self.ui.write(self.footer) | |
270 |
|
270 | |||
271 | def show(self, ctx, copies=None, **props): |
|
271 | def show(self, ctx, copies=None, **props): | |
272 | props = pycompat.byteskwargs(props) |
|
272 | props = pycompat.byteskwargs(props) | |
273 | if self.buffered: |
|
273 | if self.buffered: | |
274 | self.ui.pushbuffer(labeled=True) |
|
274 | self.ui.pushbuffer(labeled=True) | |
275 | self._show(ctx, copies, props) |
|
275 | self._show(ctx, copies, props) | |
276 | self.hunk[ctx.rev()] = self.ui.popbuffer() |
|
276 | self.hunk[ctx.rev()] = self.ui.popbuffer() | |
277 | else: |
|
277 | else: | |
278 | self._show(ctx, copies, props) |
|
278 | self._show(ctx, copies, props) | |
279 |
|
279 | |||
280 | def _show(self, ctx, copies, props): |
|
280 | def _show(self, ctx, copies, props): | |
281 | '''show a single changeset or file revision''' |
|
281 | '''show a single changeset or file revision''' | |
282 | changenode = ctx.node() |
|
282 | changenode = ctx.node() | |
283 | graphwidth = props.get(b'graphwidth', 0) |
|
283 | graphwidth = props.get(b'graphwidth', 0) | |
284 |
|
284 | |||
285 | if self.ui.quiet: |
|
285 | if self.ui.quiet: | |
286 | self.ui.write( |
|
286 | self.ui.write( | |
287 | b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node' |
|
287 | b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node' | |
288 | ) |
|
288 | ) | |
289 | return |
|
289 | return | |
290 |
|
290 | |||
291 | columns = self._columns |
|
291 | columns = self._columns | |
292 | self.ui.write( |
|
292 | self.ui.write( | |
293 | columns[b'changeset'] % scmutil.formatchangeid(ctx), |
|
293 | columns[b'changeset'] % scmutil.formatchangeid(ctx), | |
294 | label=changesetlabels(ctx), |
|
294 | label=changesetlabels(ctx), | |
295 | ) |
|
295 | ) | |
296 |
|
296 | |||
297 | # branches are shown first before any other names due to backwards |
|
297 | # branches are shown first before any other names due to backwards | |
298 | # compatibility |
|
298 | # compatibility | |
299 | branch = ctx.branch() |
|
299 | branch = ctx.branch() | |
300 | # don't show the default branch name |
|
300 | # don't show the default branch name | |
301 | if branch != b'default': |
|
301 | if branch != b'default': | |
302 | self.ui.write(columns[b'branch'] % branch, label=b'log.branch') |
|
302 | self.ui.write(columns[b'branch'] % branch, label=b'log.branch') | |
303 |
|
303 | |||
304 | for nsname, ns in pycompat.iteritems(self.repo.names): |
|
304 | for nsname, ns in pycompat.iteritems(self.repo.names): | |
305 | # branches has special logic already handled above, so here we just |
|
305 | # branches has special logic already handled above, so here we just | |
306 | # skip it |
|
306 | # skip it | |
307 | if nsname == b'branches': |
|
307 | if nsname == b'branches': | |
308 | continue |
|
308 | continue | |
309 | # we will use the templatename as the color name since those two |
|
309 | # we will use the templatename as the color name since those two | |
310 | # should be the same |
|
310 | # should be the same | |
311 | for name in ns.names(self.repo, changenode): |
|
311 | for name in ns.names(self.repo, changenode): | |
312 | self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname) |
|
312 | self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname) | |
313 | if self.ui.debugflag: |
|
313 | if self.ui.debugflag: | |
314 | self.ui.write( |
|
314 | self.ui.write( | |
315 | columns[b'phase'] % ctx.phasestr(), label=b'log.phase' |
|
315 | columns[b'phase'] % ctx.phasestr(), label=b'log.phase' | |
316 | ) |
|
316 | ) | |
317 | for pctx in scmutil.meaningfulparents(self.repo, ctx): |
|
317 | for pctx in scmutil.meaningfulparents(self.repo, ctx): | |
318 | label = b'log.parent changeset.%s' % pctx.phasestr() |
|
318 | label = b'log.parent changeset.%s' % pctx.phasestr() | |
319 | self.ui.write( |
|
319 | self.ui.write( | |
320 | columns[b'parent'] % scmutil.formatchangeid(pctx), label=label |
|
320 | columns[b'parent'] % scmutil.formatchangeid(pctx), label=label | |
321 | ) |
|
321 | ) | |
322 |
|
322 | |||
323 | if self.ui.debugflag: |
|
323 | if self.ui.debugflag: | |
324 | mnode = ctx.manifestnode() |
|
324 | mnode = ctx.manifestnode() | |
325 | if mnode is None: |
|
325 | if mnode is None: | |
326 | mnode = wdirid |
|
326 | mnode = wdirid | |
327 | mrev = wdirrev |
|
327 | mrev = wdirrev | |
328 | else: |
|
328 | else: | |
329 | mrev = self.repo.manifestlog.rev(mnode) |
|
329 | mrev = self.repo.manifestlog.rev(mnode) | |
330 | self.ui.write( |
|
330 | self.ui.write( | |
331 | columns[b'manifest'] |
|
331 | columns[b'manifest'] | |
332 | % scmutil.formatrevnode(self.ui, mrev, mnode), |
|
332 | % scmutil.formatrevnode(self.ui, mrev, mnode), | |
333 | label=b'ui.debug log.manifest', |
|
333 | label=b'ui.debug log.manifest', | |
334 | ) |
|
334 | ) | |
335 | self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user') |
|
335 | self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user') | |
336 | self.ui.write( |
|
336 | self.ui.write( | |
337 | columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date' |
|
337 | columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date' | |
338 | ) |
|
338 | ) | |
339 |
|
339 | |||
340 | if ctx.isunstable(): |
|
340 | if ctx.isunstable(): | |
341 | instabilities = ctx.instabilities() |
|
341 | instabilities = ctx.instabilities() | |
342 | self.ui.write( |
|
342 | self.ui.write( | |
343 | columns[b'instability'] % b', '.join(instabilities), |
|
343 | columns[b'instability'] % b', '.join(instabilities), | |
344 | label=b'log.instability', |
|
344 | label=b'log.instability', | |
345 | ) |
|
345 | ) | |
346 |
|
346 | |||
347 | elif ctx.obsolete(): |
|
347 | elif ctx.obsolete(): | |
348 | self._showobsfate(ctx) |
|
348 | self._showobsfate(ctx) | |
349 |
|
349 | |||
350 | self._exthook(ctx) |
|
350 | self._exthook(ctx) | |
351 |
|
351 | |||
352 | if self.ui.debugflag: |
|
352 | if self.ui.debugflag: | |
353 | files = ctx.p1().status(ctx) |
|
353 | files = ctx.p1().status(ctx) | |
354 | for key, value in zip( |
|
354 | for key, value in zip( | |
355 | [b'files', b'files+', b'files-'], |
|
355 | [b'files', b'files+', b'files-'], | |
356 | [files.modified, files.added, files.removed], |
|
356 | [files.modified, files.added, files.removed], | |
357 | ): |
|
357 | ): | |
358 | if value: |
|
358 | if value: | |
359 | self.ui.write( |
|
359 | self.ui.write( | |
360 | columns[key] % b" ".join(value), |
|
360 | columns[key] % b" ".join(value), | |
361 | label=b'ui.debug log.files', |
|
361 | label=b'ui.debug log.files', | |
362 | ) |
|
362 | ) | |
363 | elif ctx.files() and self.ui.verbose: |
|
363 | elif ctx.files() and self.ui.verbose: | |
364 | self.ui.write( |
|
364 | self.ui.write( | |
365 | columns[b'files'] % b" ".join(ctx.files()), |
|
365 | columns[b'files'] % b" ".join(ctx.files()), | |
366 | label=b'ui.note log.files', |
|
366 | label=b'ui.note log.files', | |
367 | ) |
|
367 | ) | |
368 | if copies and self.ui.verbose: |
|
368 | if copies and self.ui.verbose: | |
369 | copies = [b'%s (%s)' % c for c in copies] |
|
369 | copies = [b'%s (%s)' % c for c in copies] | |
370 | self.ui.write( |
|
370 | self.ui.write( | |
371 | columns[b'copies'] % b' '.join(copies), |
|
371 | columns[b'copies'] % b' '.join(copies), | |
372 | label=b'ui.note log.copies', |
|
372 | label=b'ui.note log.copies', | |
373 | ) |
|
373 | ) | |
374 |
|
374 | |||
375 | extra = ctx.extra() |
|
375 | extra = ctx.extra() | |
376 | if extra and self.ui.debugflag: |
|
376 | if extra and self.ui.debugflag: | |
377 | for key, value in sorted(extra.items()): |
|
377 | for key, value in sorted(extra.items()): | |
378 | self.ui.write( |
|
378 | self.ui.write( | |
379 | columns[b'extra'] % (key, stringutil.escapestr(value)), |
|
379 | columns[b'extra'] % (key, stringutil.escapestr(value)), | |
380 | label=b'ui.debug log.extra', |
|
380 | label=b'ui.debug log.extra', | |
381 | ) |
|
381 | ) | |
382 |
|
382 | |||
383 | description = ctx.description().strip() |
|
383 | description = ctx.description().strip() | |
384 | if description: |
|
384 | if description: | |
385 | if self.ui.verbose: |
|
385 | if self.ui.verbose: | |
386 | self.ui.write( |
|
386 | self.ui.write( | |
387 | _(b"description:\n"), label=b'ui.note log.description' |
|
387 | _(b"description:\n"), label=b'ui.note log.description' | |
388 | ) |
|
388 | ) | |
389 | self.ui.write(description, label=b'ui.note log.description') |
|
389 | self.ui.write(description, label=b'ui.note log.description') | |
390 | self.ui.write(b"\n\n") |
|
390 | self.ui.write(b"\n\n") | |
391 | else: |
|
391 | else: | |
392 | self.ui.write( |
|
392 | self.ui.write( | |
393 | columns[b'summary'] % description.splitlines()[0], |
|
393 | columns[b'summary'] % description.splitlines()[0], | |
394 | label=b'log.summary', |
|
394 | label=b'log.summary', | |
395 | ) |
|
395 | ) | |
396 | self.ui.write(b"\n") |
|
396 | self.ui.write(b"\n") | |
397 |
|
397 | |||
398 | self._showpatch(ctx, graphwidth) |
|
398 | self._showpatch(ctx, graphwidth) | |
399 |
|
399 | |||
400 | def _showobsfate(self, ctx): |
|
400 | def _showobsfate(self, ctx): | |
401 | # TODO: do not depend on templater |
|
401 | # TODO: do not depend on templater | |
402 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
402 | tres = formatter.templateresources(self.repo.ui, self.repo) | |
403 | t = formatter.maketemplater( |
|
403 | t = formatter.maketemplater( | |
404 | self.repo.ui, |
|
404 | self.repo.ui, | |
405 | b'{join(obsfate, "\n")}', |
|
405 | b'{join(obsfate, "\n")}', | |
406 | defaults=templatekw.keywords, |
|
406 | defaults=templatekw.keywords, | |
407 | resources=tres, |
|
407 | resources=tres, | |
408 | ) |
|
408 | ) | |
409 | obsfate = t.renderdefault({b'ctx': ctx}).splitlines() |
|
409 | obsfate = t.renderdefault({b'ctx': ctx}).splitlines() | |
410 |
|
410 | |||
411 | if obsfate: |
|
411 | if obsfate: | |
412 | for obsfateline in obsfate: |
|
412 | for obsfateline in obsfate: | |
413 | self.ui.write( |
|
413 | self.ui.write( | |
414 | self._columns[b'obsolete'] % obsfateline, |
|
414 | self._columns[b'obsolete'] % obsfateline, | |
415 | label=b'log.obsfate', |
|
415 | label=b'log.obsfate', | |
416 | ) |
|
416 | ) | |
417 |
|
417 | |||
418 | def _exthook(self, ctx): |
|
418 | def _exthook(self, ctx): | |
419 | '''empty method used by extension as a hook point |
|
419 | '''empty method used by extension as a hook point | |
420 | ''' |
|
420 | ''' | |
421 |
|
421 | |||
422 | def _showpatch(self, ctx, graphwidth=0): |
|
422 | def _showpatch(self, ctx, graphwidth=0): | |
423 | if self._includestat: |
|
423 | if self._includestat: | |
424 | self._differ.showdiff( |
|
424 | self._differ.showdiff( | |
425 | self.ui, ctx, self._diffopts, graphwidth, stat=True |
|
425 | self.ui, ctx, self._diffopts, graphwidth, stat=True | |
426 | ) |
|
426 | ) | |
427 | if self._includestat and self._includediff: |
|
427 | if self._includestat and self._includediff: | |
428 | self.ui.write(b"\n") |
|
428 | self.ui.write(b"\n") | |
429 | if self._includediff: |
|
429 | if self._includediff: | |
430 | self._differ.showdiff( |
|
430 | self._differ.showdiff( | |
431 | self.ui, ctx, self._diffopts, graphwidth, stat=False |
|
431 | self.ui, ctx, self._diffopts, graphwidth, stat=False | |
432 | ) |
|
432 | ) | |
433 | if self._includestat or self._includediff: |
|
433 | if self._includestat or self._includediff: | |
434 | self.ui.write(b"\n") |
|
434 | self.ui.write(b"\n") | |
435 |
|
435 | |||
436 |
|
436 | |||
class changesetformatter(changesetprinter):
    """Format changeset information by generic formatter"""

    def __init__(
        self, ui, repo, fm, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # machine-readable output always uses git-style diffs
        self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
        # fm: the generic formatter driving the output (json/cbor/...)
        self._fm = fm

    def close(self):
        # finalize the formatter (flushes any buffered structured output)
        self._fm.end()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        fm = self._fm
        fm.startitem()
        fm.context(ctx=ctx)
        # rev/node are always emitted, even in quiet mode
        fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))

        # datahint lists extra fields explicitly requested by the template;
        # in quiet mode with no hints, rev/node above is all we emit
        datahint = fm.datahint()
        if self.ui.quiet and not datahint:
            return

        fm.data(
            branch=ctx.branch(),
            phase=ctx.phasestr(),
            user=ctx.user(),
            date=fm.formatdate(ctx.date()),
            desc=ctx.description(),
            bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
            tags=fm.formatlist(ctx.tags(), name=b'tag'),
            parents=fm.formatlist(
                [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
            ),
        )

        # debug mode (or an explicit hint) adds manifest/extra fields;
        # wdirid stands in for the manifest of the working directory
        if self.ui.debugflag or b'manifest' in datahint:
            fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid))
        if self.ui.debugflag or b'extra' in datahint:
            fm.data(extra=fm.formatdict(ctx.extra()))

        if (
            self.ui.debugflag
            or b'modified' in datahint
            or b'added' in datahint
            or b'removed' in datahint
        ):
            # status relative to the first parent gives the per-kind file
            # lists for this changeset
            files = ctx.p1().status(ctx)
            fm.data(
                modified=fm.formatlist(files.modified, name=b'file'),
                added=fm.formatlist(files.added, name=b'file'),
                removed=fm.formatlist(files.removed, name=b'file'),
            )

        verbose = not self.ui.debugflag and self.ui.verbose
        if verbose or b'files' in datahint:
            fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
        # precedence: (verbose and copies) or (b'copies' in datahint)
        if verbose and copies or b'copies' in datahint:
            fm.data(
                copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
            )

        # diffstat/diff are rendered into a ui buffer and captured as plain
        # text fields rather than written directly
        if self._includestat or b'diffstat' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
            fm.data(diffstat=self.ui.popbuffer())
        if self._includediff or b'diff' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
            fm.data(diff=self.ui.popbuffer())
508 |
|
508 | |||
509 |
|
509 | |||
class changesettemplater(changesetprinter):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changesettemplater for common cases. See functions such as:
    maketemplater, changesetdisplayer, buildcommittemplate, or other
    functions that use changesettemplater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(
        self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # tres is shared with _graphnodeformatter()
        self._tresources = tres = formatter.templateresources(ui, repo)
        self.t = formatter.loadtemplater(
            ui,
            tmplspec,
            defaults=templatekw.keywords,
            resources=tres,
            cache=templatekw.defaulttempl,
        )
        # monotonically increasing item index, used by _show() for the
        # per-item separator
        self._counter = itertools.count()

        self._tref = tmplspec.ref
        # map of logical part name -> template name to render for it;
        # empty string means "no template for this part"
        self._parts = {
            b'header': b'',
            b'footer': b'',
            tmplspec.ref: tmplspec.ref,
            b'docheader': b'',
            b'docfooter': b'',
            b'separator': b'',
        }
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, b''),
                (self.ui.verbose, b'_verbose'),
                (self.ui.quiet, b'_quiet'),
                (self.ui.debugflag, b'_debug'),
            ]
            # later (more specific) modes override earlier ones when the
            # suffixed template exists in the map file
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        # docheader is written once, up front, before any item
        if self._parts[b'docheader']:
            self.ui.write(self.t.render(self._parts[b'docheader'], {}))

    def close(self):
        # append the document footer before the base class flushes it
        if self._parts[b'docfooter']:
            if not self.footer:
                self.footer = b""
            self.footer += self.t.render(self._parts[b'docfooter'], {})
        return super(changesettemplater, self).close()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props[b'ctx'] = ctx
        props[b'index'] = index = next(self._counter)
        props[b'revcache'] = {b'copies': copies}
        graphwidth = props.get(b'graphwidth', 0)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts[b'separator'] and index > 0:
            self.ui.write(self.t.render(self._parts[b'separator'], {}))

        # write header
        if self._parts[b'header']:
            h = self.t.render(self._parts[b'header'], props)
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only repeat the header when it actually changes
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(self.t.render(key, props))
        self._showpatch(ctx, graphwidth)

        # the footer is rendered once, from the first item's props
        if self._parts[b'footer']:
            if not self.footer:
                self.footer = self.t.render(self._parts[b'footer'], props)
606 |
|
606 | |||
607 |
|
607 | |||
def templatespec(tmpl, mapfile):
    """Build a formatter.templatespec for changeset display.

    ``tmpl`` is a literal template byte-string (must not be ``str`` on
    Python 3); ``mapfile`` selects a map-file based spec when given.
    """
    if pycompat.ispy3:
        assert not isinstance(tmpl, str), b'tmpl must not be a str'
    if not mapfile:
        return formatter.templatespec(b'', tmpl, None)
    return formatter.templatespec(b'changeset', tmpl, mapfile)
615 |
|
615 | |||
616 |
|
616 | |||
def _lookuptemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # ui settings: a configured logtemplate beats a configured style
    if not tmpl and not style:
        tmpl = ui.config(b'ui', b'logtemplate')
        if tmpl:
            return templatespec(templater.unquotestring(tmpl), None)
        style = util.expandpath(ui.config(b'ui', b'style'))

    if not tmpl and style:
        mapfile = style
        # a bare name (no directory part) is resolved through
        # templater.templatepath(), preferring "map-cmdline.<name>"
        if not os.path.split(mapfile)[0]:
            found = templater.templatepath(
                b'map-cmdline.' + mapfile
            ) or templater.templatepath(mapfile)
            if found:
                mapfile = found
        return templatespec(None, mapfile)

    return formatter.lookuptemplate(ui, b'changeset', tmpl)
642 |
|
642 | |||
643 |
|
643 | |||
def maketemplater(ui, repo, tmpl, buffered=False):
    """Create a changesettemplater from a literal template 'tmpl'
    byte-string."""
    return changesettemplater(
        ui, repo, templatespec(tmpl, None), buffered=buffered
    )
649 |
|
649 | |||
650 |
|
650 | |||
def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
    """Build the displayer for one changeset (template or regular).

    The display format is the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    When every one of these is unset or empty, regular display via
    changesetprinter() is used.
    """
    postargs = (differ, opts, buffered)
    spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))

    # machine-readable formats have slightly different keyword set than
    # plain templates, which are handled by changesetformatter.
    # note that {b'pickle', b'debug'} can also be added to the list if needed.
    if spec.ref in {b'cbor', b'json'}:
        fm = ui.formatter(b'log', opts)
        return changesetformatter(ui, repo, fm, *postargs)

    if spec.ref or spec.tmpl or spec.mapfile:
        return changesettemplater(ui, repo, spec, *postargs)

    return changesetprinter(ui, repo, *postargs)
676 |
|
676 | |||
677 |
|
677 | |||
def _makematcher(repo, revs, pats, opts):
    """Build matcher and expanded patterns from log options

    If --follow, revs are the revisions to follow from.

    Returns (match, pats, slowpath) where
    - match: a matcher built from the given pats and -I/-X opts
    - pats: patterns used (globs are expanded on Windows)
    - slowpath: True if patterns aren't as simple as scanning filelogs
    """
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    if not slowpath:
        follow = opts.get(b'follow') or opts.get(b'follow_first')
        # with --follow and explicit --rev, the named files must exist in
        # the starting contexts rather than in the working directory
        startctxs = []
        if follow and opts.get(b'rev'):
            startctxs = [repo[r] for r in revs]
        for f in match.files():
            if follow and startctxs:
                # No idea if the path was a directory at that revision, so
                # take the slow path.
                if any(f not in c for c in startctxs):
                    slowpath = True
                    continue
            elif follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % f
                    )
            # an empty filelog means the path never existed as a file
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % f
                    )
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == b'.' or path in repo.store:
                break
        else:
            # for..else: no path matched, so the slow path is pointless
            slowpath = False

    return match, pats, slowpath
743 |
|
743 | |||
744 |
|
744 | |||
def _fileancestors(repo, revs, match, followfirst):
    """Return (revs, filematcher) following file ancestry from ``revs``.

    ``revs`` is returned as a descending smartset of the revisions that
    introduce ancestors of the matched files; ``filematcher(ctx)`` matches
    the ancestor file names recorded for that revision.
    """
    fctxs = []
    for rev in revs:
        ctx = repo[rev]
        fctxs.extend(ctx[path].introfilectx() for path in ctx.walk(match))

    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "pathcache". "pathcache" is populated as a side
    # effect of the graph traversal.
    pathcache = {}

    def filematcher(ctx):
        return scmutil.matchfiles(repo, pathcache.get(ctx.rev(), []))

    def revgen():
        ancestors = dagop.filectxancestors(fctxs, followfirst=followfirst)
        for rev, fctxset in ancestors:
            pathcache[rev] = [fctx.path() for fctx in fctxset]
            yield rev

    return smartset.generatorset(revgen(), iterasc=False), filematcher
767 |
|
767 | |||
768 |
|
768 | |||
769 | def _makenofollowfilematcher(repo, pats, opts): |
|
769 | def _makenofollowfilematcher(repo, pats, opts): | |
770 | '''hook for extensions to override the filematcher for non-follow cases''' |
|
770 | '''hook for extensions to override the filematcher for non-follow cases''' | |
771 | return None |
|
771 | return None | |
772 |
|
772 | |||
773 |
|
773 | |||
# Maps a log command option name to a (revop, listop) pair consumed by
# _makerevset():
# - revop: revset template applied to a single option value; a template
#   containing no '%' placeholder is appended to the expression verbatim.
#   None means only the list form applies (see b'_matchfiles').
# - listop: revset template combining the per-value results for
#   list-valued options; None means the option takes a single value.
_opt2logrevset = {
    b'no_merges': (b'not merge()', None),
    b'only_merges': (b'merge()', None),
    b'_matchfiles': (None, b'_matchfiles(%ps)'),
    b'date': (b'date(%s)', None),
    b'branch': (b'branch(%s)', b'%lr'),
    b'_patslog': (b'filelog(%s)', b'%lr'),
    b'keyword': (b'keyword(%s)', b'%lr'),
    b'prune': (b'ancestors(%s)', b'not %lr'),
    b'user': (b'user(%s)', b'%lr'),
}
785 |
|
785 | |||
786 |
|
786 | |||
def _makerevset(repo, match, pats, slowpath, opts):
    """Return a revset string built from log options and file patterns"""
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get(b'follow') or opts.get(b'follow_first')

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts[b'branch'] = opts.get(b'branch', []) + opts.get(b'only_branch', [])
    opts[b'branch'] = [repo.lookupbranch(name) for name in opts[b'branch']]

    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X b" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = [b'r:', b'd:relpath']
        matchargs.extend(b'p:' + p for p in pats)
        matchargs.extend(b'i:' + p for p in opts.get(b'include', []))
        matchargs.extend(b'x:' + p for p in opts.get(b'exclude', []))
        opts[b'_matchfiles'] = matchargs
    elif not follow:
        opts[b'_patslog'] = list(pats)

    expr = []
    # sorted() keeps the generated expression deterministic
    for op, val in sorted(pycompat.iteritems(opts)):
        if not val or op not in _opt2logrevset:
            continue
        revop, listop = _opt2logrevset[op]
        if revop and b'%' not in revop:
            # fixed revset fragment, appended verbatim
            expr.append(revop)
        elif not listop:
            # single-valued option
            expr.append(revsetlang.formatspec(revop, val))
        else:
            # list-valued option: format each value, then combine
            if revop:
                val = [revsetlang.formatspec(revop, v) for v in val]
            expr.append(revsetlang.formatspec(listop, val))

    if not expr:
        return None
    return b'(' + b' and '.join(expr) + b')'
839 |
|
839 | |||
840 |
|
840 | |||
def _initialrevs(repo, opts):
    """Return the initial set of revisions to be filtered or followed"""
    if opts.get(b'rev'):
        # Explicit --rev wins over any follow behavior.
        return scmutil.revrange(repo, opts[b'rev'])
    if opts.get(b'follow') or opts.get(b'follow_first'):
        if repo.dirstate.p1() == nullid:
            # Nothing checked out: nothing to follow.
            return smartset.baseset()
        return repo.revs(b'.')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
854 |
|
854 | |||
855 |
|
855 | |||
def getrevs(repo, pats, opts):
    # type: (Any, Any, Any) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
    """Return (revs, differ) where revs is a smartset

    differ is a changesetdiffer with pre-configured file matcher.
    """
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    followfirst = opts.get(b'follow_first')
    maxcount = getlimit(opts)
    revs = _initialrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None

    match, pats, slowpath = _makematcher(repo, revs, pats, opts)
    filematcher = None
    if follow:
        if slowpath or match.always():
            revs = dagop.revancestors(repo, revs, followfirst=followfirst)
        else:
            revs, filematcher = _fileancestors(repo, revs, match, followfirst)
        revs.reverse()
    if filematcher is None:
        # Fall back to the pattern-derived matcher, and ultimately to the
        # matcher computed above when no pattern-specific one applies.
        filematcher = _makenofollowfilematcher(repo, pats, opts) or (
            lambda ctx: match
        )

    revspec = _makerevset(repo, match, pats, slowpath, opts)
    if opts.get(b'graph'):
        # User-specified revs might be unsorted, but don't sort before
        # _makerevset because it might depend on the order of revs
        if repo.ui.configbool(b'experimental', b'log.topo'):
            if not revs.istopo():
                revs = dagop.toposort(revs, repo.changelog.parentrevs)
                # TODO: try to iterate the set lazily
                revs = revset.baseset(list(revs), istopo=True)
        elif not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if revspec:
        revsetmatcher = revset.match(None, revspec)
        revs = revsetmatcher(repo, revs)
    if maxcount is not None:
        revs = revs.slice(0, maxcount)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    return revs, differ
903 |
|
903 | |||
904 |
|
904 | |||
905 | def _parselinerangeopt(repo, opts): |
|
905 | def _parselinerangeopt(repo, opts): | |
906 | """Parse --line-range log option and return a list of tuples (filename, |
|
906 | """Parse --line-range log option and return a list of tuples (filename, | |
907 | (fromline, toline)). |
|
907 | (fromline, toline)). | |
908 | """ |
|
908 | """ | |
909 | linerangebyfname = [] |
|
909 | linerangebyfname = [] | |
910 | for pat in opts.get(b'line_range', []): |
|
910 | for pat in opts.get(b'line_range', []): | |
911 | try: |
|
911 | try: | |
912 | pat, linerange = pat.rsplit(b',', 1) |
|
912 | pat, linerange = pat.rsplit(b',', 1) | |
913 | except ValueError: |
|
913 | except ValueError: | |
914 | raise error.Abort(_(b'malformatted line-range pattern %s') % pat) |
|
914 | raise error.Abort(_(b'malformatted line-range pattern %s') % pat) | |
915 | try: |
|
915 | try: | |
916 | fromline, toline = map(int, linerange.split(b':')) |
|
916 | fromline, toline = map(int, linerange.split(b':')) | |
917 | except ValueError: |
|
917 | except ValueError: | |
918 | raise error.Abort(_(b"invalid line range for %s") % pat) |
|
918 | raise error.Abort(_(b"invalid line range for %s") % pat) | |
919 | msg = _(b"line range pattern '%s' must match exactly one file") % pat |
|
919 | msg = _(b"line range pattern '%s' must match exactly one file") % pat | |
920 | fname = scmutil.parsefollowlinespattern(repo, None, pat, msg) |
|
920 | fname = scmutil.parsefollowlinespattern(repo, None, pat, msg) | |
921 | linerangebyfname.append( |
|
921 | linerangebyfname.append( | |
922 | (fname, util.processlinerange(fromline, toline)) |
|
922 | (fname, util.processlinerange(fromline, toline)) | |
923 | ) |
|
923 | ) | |
924 | return linerangebyfname |
|
924 | return linerangebyfname | |
925 |
|
925 | |||
926 |
|
926 | |||
def getlinerangerevs(repo, userrevs, opts):
    """Return (revs, differ).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "differ" is a changesetdiffer with pre-configured file matcher and hunks
    filter.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file path -> [line range]".
    lrbyrev = {}
    for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % fname
            )
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev not in userrevs:
                continue
            perfile = lrbyrev.setdefault(rev, {})
            perfile.setdefault(fctx.path(), []).append(linerange)

    def nofilterhunksfn(fctx, hunks):
        return hunks

    def hunksfilter(ctx):
        perfile = lrbyrev.get(ctx.rev())
        if perfile is None:
            return nofilterhunksfn

        def filterfn(fctx, hunks):
            ranges = perfile.get(fctx.path())
            if ranges is None:
                # No restriction recorded for this file: keep every hunk.
                for hunk in hunks:
                    yield hunk
                return
            for hr, lines in hunks:
                # A None header marks a binary file; pass it through.
                # Otherwise keep only hunks overlapping a requested range.
                if hr is None or any(
                    mdiff.hunkinrange(hr[2:], lr) for lr in ranges
                ):
                    yield hr, lines

        return filterfn

    def filematcher(ctx):
        return scmutil.matchfiles(repo, list(lrbyrev.get(ctx.rev(), [])))

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    differ._makehunksfilter = hunksfilter
    return smartset.baseset(sorted(lrbyrev, reverse=True)), differ
987 |
|
987 | |||
988 |
|
988 | |||
def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> text used as the graph node symbol."""
    spec = ui.config(b'ui', b'graphnodetemplate')
    if not spec:
        return templatekw.getgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    if isinstance(displayer, changesettemplater):
        # reuse cache of slow templates
        tres = displayer._tresources
    else:
        tres = formatter.templateresources(ui)
    templ = formatter.maketemplater(
        ui, spec, defaults=templatekw.keywords, resources=tres
    )
    return lambda repo, ctx: templ.renderdefault({b'ctx': ctx, b'repo': repo})
1009 |
|
1009 | |||
1010 |
|
1010 | |||
def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
    """Render the revision *dag* as an ASCII graph through *displayer*."""
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state.styles

    # only set graph styling if HGPLAIN is not set.
    if ui.plain(b'graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, b'|'))
    else:
        # experimental config: experimental.graphstyle.*
        for name, key in [
            (b'parent', graphmod.PARENT),
            (b'grandparent', graphmod.GRANDPARENT),
            (b'missing', graphmod.MISSINGPARENT),
        ]:
            configured = ui.config(
                b'experimental', b'graphstyle.%s' % name, styles[key]
            )
            # An empty style means "no edge drawn" and is stored as None.
            styles[key] = configured or None

    # experimental config: experimental.graphshorten
    state.graphshorten = ui.configbool(b'experimental', b'graphshorten')

    for rev, gtype, ctx, parents in dag:
        nodesym = formatnode(repo, ctx)
        copies = getcopies(ctx) if getcopies else None
        edgegen = edgefn(gtype, nodesym, state, rev, parents)
        firstedge = next(edgegen)
        displayer.show(
            ctx,
            copies=copies,
            graphwidth=firstedge[2],
            **pycompat.strkwargs(props)
        )
        lines = displayer.hunk.pop(rev).split(b'\n')
        # Drop the empty trailing element produced by a terminating newline.
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        # The changeset text is attached to the first edge only; subsequent
        # edges of the same node render with empty line lists.
        for etype, echar, ewidth, coldata in itertools.chain(
            [firstedge], edgegen
        ):
            graphmod.ascii(ui, state, etype, echar, lines, coldata)
            lines = []
    displayer.close()
1055 |
|
1055 | |||
1056 |
|
1056 | |||
def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
    """Show *revs* as an ASCII revision graph via :func:`displaygraph`."""
    displaygraph(
        ui,
        repo,
        graphmod.dagwalker(repo, revs),
        displayer,
        graphmod.asciiedges,
        getrenamed,
    )
1060 |
|
1060 | |||
1061 |
|
1061 | |||
def displayrevs(ui, repo, revs, displayer, getcopies):
    """Show each revision in *revs* through *displayer*, then close it."""
    for rev in revs:
        ctx = repo[rev]
        if getcopies:
            copies = getcopies(ctx)
        else:
            copies = None
        displayer.show(ctx, copies=copies)
        displayer.flush(ctx)
    displayer.close()
1069 |
|
1069 | |||
1070 |
|
1070 | |||
def checkunsupportedgraphflags(pats, opts):
    """Abort if any option incompatible with -G/--graph is set in *opts*.

    Raises error.Abort naming the offending command-line flag.
    """
    for op in [b"newest_first"]:
        # Single dict lookup instead of the double `op in opts and opts[op]`;
        # a missing key yields None, which is falsy like an unset flag.
        if opts.get(op):
            raise error.Abort(
                _(b"-G/--graph option is incompatible with --%s")
                % op.replace(b"_", b"-")
            )
1078 |
|
1078 | |||
1079 |
|
1079 | |||
def graphrevs(repo, nodes, opts):
    """Return a graph DAG over *nodes* (newest first), honoring --limit."""
    maxcount = getlimit(opts)
    nodes.reverse()
    if maxcount is not None:
        nodes = nodes[:maxcount]
    return graphmod.nodes(repo, nodes)
General Comments 0
You need to be logged in to leave comments.
Login now