##// END OF EJS Templates
graphlog: change state dict to attr struct...
Yuya Nishihara -
r44165:e006f09e default draft
parent child Browse files
Show More
@@ -1,516 +1,517
1 1 # Revision graph generator for Mercurial
2 2 #
3 3 # Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl>
4 4 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 """supports walking the history as DAGs suitable for graphical output
10 10
11 11 The most basic format we use is that of::
12 12
13 13 (id, type, data, [parentids])
14 14
15 15 The node and parent ids are arbitrary integers which identify a node in the
16 16 context of the graph returned. Type is a constant specifying the node type.
17 17 Data depends on type.
18 18 """
19 19
20 20 from __future__ import absolute_import
21 21
22 22 from .node import nullrev
23 from .thirdparty import attr
23 24 from . import (
24 25 dagop,
25 26 pycompat,
26 27 smartset,
27 28 util,
28 29 )
29 30
30 31 CHANGESET = b'C'
31 32 PARENT = b'P'
32 33 GRANDPARENT = b'G'
33 34 MISSINGPARENT = b'M'
34 35 # Style of line to draw. None signals a line that ends and is removed at this
35 36 # point. A number prefix means only the last N characters of the current block
36 37 # will use that style, the rest will use the PARENT style. Add a - sign
37 38 # (so making N negative) and all but the first N characters use that style.
38 39 EDGES = {PARENT: b'|', GRANDPARENT: b':', MISSINGPARENT: None}
39 40
40 41
def dagwalker(repo, revs):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples

    This generator function walks through revisions (which should be ordered
    from bigger to lower). It returns a tuple for each node.

    Each parentinfo entry is a tuple with (edgetype, parentid), where edgetype
    is one of PARENT, GRANDPARENT or MISSINGPARENT. The node and parent ids
    are arbitrary integers which identify a node in the context of the graph
    returned.
    """
    gpcache = {}

    for rev in revs:
        ctx = repo[rev]
        # partition into parents in the rev set and missing parents, then
        # augment the lists with markers, to inform graph drawing code about
        # what kind of edge to draw between nodes.
        # (set comprehension instead of set(genexpr) for idiomatic clarity)
        pset = {p.rev() for p in ctx.parents() if p.rev() in revs}
        mpars = [
            p.rev()
            for p in ctx.parents()
            if p.rev() != nullrev and p.rev() not in pset
        ]
        parents = [(PARENT, p) for p in sorted(pset)]

        for mpar in mpars:
            gp = gpcache.get(mpar)
            if gp is None:
                # precompute slow query as we know reachableroots() goes
                # through all revs (issue4782)
                if not isinstance(revs, smartset.baseset):
                    revs = smartset.baseset(revs)
                gp = gpcache[mpar] = sorted(
                    set(dagop.reachableroots(repo, revs, [mpar]))
                )
            if not gp:
                # no ancestor of mpar is in the displayed set: dangling edge
                parents.append((MISSINGPARENT, mpar))
                pset.add(mpar)
            else:
                parents.extend((GRANDPARENT, g) for g in gp if g not in pset)
                pset.update(gp)

        yield (ctx.rev(), CHANGESET, ctx, parents)
86 87
87 88
def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        # only keep edges to parents that are themselves in the walked set
        # (set comprehension instead of set(genexpr) for idiomatic clarity)
        parents = {
            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
        }
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
101 102
102 103
def colored(dag, repo):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

        (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.
    """
    seen = []  # currently active node columns, in display order
    colors = {}  # node -> color index of its incoming edge
    newcolor = 1
    config = {}  # branch name -> {b'width': int, b'color': bytes}

    # read per-branch width/color overrides from the [graph] config section;
    # keys look like "<branch>.width" / "<branch>.color"
    for key, val in repo.ui.configitems(b'graph'):
        if b'.' in key:
            branch, setting = key.rsplit(b'.', 1)
            # Validation: only accept well-formed numeric/alphanumeric values
            if setting == b"width" and val.isdigit():
                config.setdefault(branch, {})[setting] = int(val)
            elif setting == b"color" and val.isalnum():
                config.setdefault(branch, {})[setting] = val

    if config:
        # branch lookup hits the changelog; cache it per rev
        getconf = util.lrucachefunc(
            lambda rev: config.get(repo[rev].branch(), {})
        )
    else:
        getconf = lambda rev: {}

    for (cur, type, data, parents) in dag:

        # Compute seen and next
        if cur not in seen:
            seen.append(cur)  # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        next = seen[:]

        # Add parents to next: they take over the current node's column slot
        addparents = [p for pt, p in parents if p not in next]
        next[col : col + 1] = addparents

        # Set colors for the parents
        for i, p in enumerate(addparents):
            if not i:
                # first parent continues the current node's color
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph: ongoing columns keep their color; edges
        # leaving the current node fan out to each parent's new column
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in next:
                bconf = getconf(eid)
                edges.append(
                    (
                        ecol,
                        next.index(eid),
                        colors[eid],
                        bconf.get(b'width', -1),
                        bconf.get(b'color', b''),
                    )
                )
            elif eid == cur:
                for ptype, p in parents:
                    bconf = getconf(p)
                    edges.append(
                        (
                            ecol,
                            next.index(p),
                            color,
                            bconf.get(b'width', -1),
                            bconf.get(b'color', b''),
                        )
                    )

        # Yield and move on
        yield (cur, type, data, (col, color), edges)
        seen = next
191 192
192 193
193 194 def asciiedges(type, char, state, rev, parents):
194 195 """adds edge info to changelog DAG walk suitable for ascii()"""
195 seen = state[b'seen']
196 seen = state.seen
196 197 if rev not in seen:
197 198 seen.append(rev)
198 199 nodeidx = seen.index(rev)
199 200
200 201 knownparents = []
201 202 newparents = []
202 203 for ptype, parent in parents:
203 204 if parent == rev:
204 205 # self reference (should only be seen in null rev)
205 206 continue
206 207 if parent in seen:
207 208 knownparents.append(parent)
208 209 else:
209 210 newparents.append(parent)
210 state[b'edges'][parent] = state[b'styles'].get(ptype, b'|')
211 state.edges[parent] = state.styles.get(ptype, b'|')
211 212
212 213 ncols = len(seen)
213 214 width = 1 + ncols * 2
214 215 nextseen = seen[:]
215 216 nextseen[nodeidx : nodeidx + 1] = newparents
216 217 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
217 218
218 219 seen[:] = nextseen
219 220 while len(newparents) > 2:
220 221 # ascii() only knows how to add or remove a single column between two
221 222 # calls. Nodes with more than two parents break this constraint so we
222 223 # introduce intermediate expansion lines to grow the active node list
223 224 # slowly.
224 225 edges.append((nodeidx, nodeidx))
225 226 edges.append((nodeidx, nodeidx + 1))
226 227 nmorecols = 1
227 228 width += 2
228 229 yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
229 230 char = b'\\'
230 231 nodeidx += 1
231 232 ncols += 1
232 233 edges = []
233 234 del newparents[0]
234 235
235 236 if len(newparents) > 0:
236 237 edges.append((nodeidx, nodeidx))
237 238 if len(newparents) > 1:
238 239 edges.append((nodeidx, nodeidx + 1))
239 240 nmorecols = len(nextseen) - ncols
240 241 if nmorecols > 0:
241 242 width += 2
242 243 # remove current node from edge characters, no longer needed
243 state[b'edges'].pop(rev, None)
244 state.edges.pop(rev, None)
244 245 yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
245 246
246 247
247 248 def _fixlongrightedges(edges):
248 249 for (i, (start, end)) in enumerate(edges):
249 250 if end > start:
250 251 edges[i] = (start, end + 1)
251 252
252 253
253 254 def _getnodelineedgestail(echars, idx, pidx, ncols, coldiff, pdiff, fix_tail):
254 255 if fix_tail and coldiff == pdiff and coldiff != 0:
255 256 # Still going in the same non-vertical direction.
256 257 if coldiff == -1:
257 258 start = max(idx + 1, pidx)
258 259 tail = echars[idx * 2 : (start - 1) * 2]
259 260 tail.extend([b"/", b" "] * (ncols - start))
260 261 return tail
261 262 else:
262 263 return [b"\\", b" "] * (ncols - idx - 1)
263 264 else:
264 265 remainder = ncols - idx - 1
265 266 return echars[-(remainder * 2) :] if remainder > 0 else []
266 267
267 268
268 269 def _drawedges(echars, edges, nodeline, interline):
269 270 for (start, end) in edges:
270 271 if start == end + 1:
271 272 interline[2 * end + 1] = b"/"
272 273 elif start == end - 1:
273 274 interline[2 * start + 1] = b"\\"
274 275 elif start == end:
275 276 interline[2 * start] = echars[2 * start]
276 277 else:
277 278 if 2 * end >= len(nodeline):
278 279 continue
279 280 nodeline[2 * end] = b"+"
280 281 if start > end:
281 282 (start, end) = (end, start)
282 283 for i in range(2 * start + 1, 2 * end):
283 284 if nodeline[i] != b"+":
284 285 nodeline[i] = b"-"
285 286
286 287
287 288 def _getpaddingline(echars, idx, ncols, edges):
288 289 # all edges up to the current node
289 290 line = echars[: idx * 2]
290 291 # an edge for the current node, if there is one
291 292 if (idx, idx - 1) in edges or (idx, idx) in edges:
292 293 # (idx, idx - 1) (idx, idx)
293 294 # | | | | | | | |
294 295 # +---o | | o---+
295 296 # | | X | | X | |
296 297 # | |/ / | |/ /
297 298 # | | | | | |
298 299 line.extend(echars[idx * 2 : (idx + 1) * 2])
299 300 else:
300 301 line.extend([b' ', b' '])
301 302 # all edges to the right of the current node
302 303 remainder = ncols - idx - 1
303 304 if remainder > 0:
304 305 line.extend(echars[-(remainder * 2) :])
305 306 return line
306 307
307 308
308 309 def _drawendinglines(lines, extra, edgemap, seen, state):
309 310 """Draw ending lines for missing parent edges
310 311
311 312 None indicates an edge that ends at between this node and the next
312 313 Replace with a short line ending in ~ and add / lines to any edges to
313 314 the right.
314 315
315 316 """
316 317 if None not in edgemap.values():
317 318 return
318 319
319 320 # Check for more edges to the right of our ending edges.
320 321 # We need enough space to draw adjustment lines for these.
321 322 edgechars = extra[::2]
322 323 while edgechars and edgechars[-1] is None:
323 324 edgechars.pop()
324 325 shift_size = max((edgechars.count(None) * 2) - 1, 0)
325 minlines = 3 if not state[b'graphshorten'] else 2
326 minlines = 3 if not state.graphshorten else 2
326 327 while len(lines) < minlines + shift_size:
327 328 lines.append(extra[:])
328 329
329 330 if shift_size:
330 331 empties = []
331 332 toshift = []
332 333 first_empty = extra.index(None)
333 334 for i, c in enumerate(extra[first_empty::2], first_empty // 2):
334 335 if c is None:
335 336 empties.append(i * 2)
336 337 else:
337 338 toshift.append(i * 2)
338 339 targets = list(range(first_empty, first_empty + len(toshift) * 2, 2))
339 340 positions = toshift[:]
340 341 for line in lines[-shift_size:]:
341 342 line[first_empty:] = [b' '] * (len(line) - first_empty)
342 343 for i in range(len(positions)):
343 344 pos = positions[i] - 1
344 345 positions[i] = max(pos, targets[i])
345 346 line[pos] = b'/' if pos > targets[i] else extra[toshift[i]]
346 347
347 map = {1: b'|', 2: b'~'} if not state[b'graphshorten'] else {1: b'~'}
348 map = {1: b'|', 2: b'~'} if not state.graphshorten else {1: b'~'}
348 349 for i, line in enumerate(lines):
349 350 if None not in line:
350 351 continue
351 352 line[:] = [c or map.get(i, b' ') for c in line]
352 353
353 354 # remove edges that ended
354 355 remove = [p for p, c in edgemap.items() if c is None]
355 356 for parent in remove:
356 357 del edgemap[parent]
357 358 seen.remove(parent)
358 359
359 360
@attr.s
class asciistate(object):
    """State of ascii() graph rendering"""

    # currently active node columns, in display order
    seen = attr.ib(init=False, default=attr.Factory(list))
    # parent rev -> edge character; None marks an edge that has ended
    # (see EDGES and _drawendinglines)
    edges = attr.ib(init=False, default=attr.Factory(dict))
    # column-count delta seen by the previous ascii() call
    lastcoldiff = attr.ib(init=False, default=0)
    # node column used by the previous ascii() call
    lastindex = attr.ib(init=False, default=0)
    # edge-style mapping; a per-state copy of EDGES so it can be customized
    styles = attr.ib(init=False, default=attr.Factory(EDGES.copy))
    # compact rendering flag (presumably driven by the 'graphshorten'
    # ui config mentioned in ascii() — set by callers outside this file)
    graphshorten = attr.ib(init=False, default=False)
370 371
371 372
def outputgraph(ui, graph):
    """outputs an ASCII graph of a DAG

    this is a helper function for 'ascii' below.

    takes the following arguments:

    - ui to write to
    - graph data: list of { graph nodes/edges, text }

    this function can be monkey-patched by extensions to alter graph display
    without needing to mimic all of the edge-fixup logic in ascii()
    """
    for graphline, logstr in graph:
        combined = (graphline + logstr).rstrip()
        ui.write(combined + b"\n")
387 388
388 389
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data, usually 'C' for changesets.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """
    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2

    edgemap, seen = state.edges, state.seen
    # Be tolerant of history issues; make sure we have at least ncols + coldiff
    # elements to work with. See test-glog.t for broken history test cases.
    echars = [c for p in seen for c in (edgemap.get(p, b'|'), b' ')]
    echars.extend((b'|', b' ') * max(ncols + coldiff - len(seen), 0))

    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        _fixlongrightedges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (
        len(text) > 2 and coldiff == -1 and [x for (x, y) in edges if x + 1 < y]
    )

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = echars[: idx * 2]
    nodeline.extend([char, b" "])

    nodeline.extend(
        _getnodelineedgestail(
            echars,
            idx,
            state.lastindex,
            ncols,
            coldiff,
            state.lastcoldiff,
            fix_nodeline_tail,
        )
    )

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = echars[: idx * 2]
    for i in pycompat.xrange(2 + coldiff):
        shift_interline.append(b' ')
    count = ncols - idx - 1
    if coldiff == -1:
        for i in pycompat.xrange(count):
            shift_interline.extend([b'/', b' '])
    elif coldiff == 0:
        shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2])
    else:
        for i in pycompat.xrange(count):
            shift_interline.extend([b'\\', b' '])

    # draw edges from the current node to its parents
    _drawedges(echars, edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(_getpaddingline(echars, idx, ncols, edges))

    # If 'graphshorten' config, only draw shift_interline
    # when there is any non vertical flow in graph.
    if state.graphshorten:
        if any(c in br'\/' for c in shift_interline if c):
            lines.append(shift_interline)
    # Else, no 'graphshorten' config so draw shift_interline.
    else:
        lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    extra_interline = echars[: (ncols + coldiff) * 2]
    if len(lines) < len(text):
        while len(lines) < len(text):
            lines.append(extra_interline[:])

    # terminate edges whose parent is missing from the displayed set
    _drawendinglines(lines, extra_interline, edgemap, seen, state)

    # pad the text so lines and text zip together cleanly below
    while len(text) < len(lines):
        text.append(b"")

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    lines = [
        b"%-*s " % (2 * indentation_level, b"".join(line)) for line in lines
    ]
    outputgraph(ui, zip(lines, text))

    # ... and start over
    state.lastcoldiff = coldiff
    state.lastindex = idx
@@ -1,1085 +1,1085
1 1 # logcmdutil.py - utility for log-like commands
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import itertools
11 11 import os
12 12 import posixpath
13 13
14 14 from .i18n import _
15 15 from .node import (
16 16 nullid,
17 17 wdirid,
18 18 wdirrev,
19 19 )
20 20
21 21 from . import (
22 22 dagop,
23 23 error,
24 24 formatter,
25 25 graphmod,
26 26 match as matchmod,
27 27 mdiff,
28 28 patch,
29 29 pathutil,
30 30 pycompat,
31 31 revset,
32 32 revsetlang,
33 33 scmutil,
34 34 smartset,
35 35 templatekw,
36 36 templater,
37 37 util,
38 38 )
39 39 from .utils import (
40 40 dateutil,
41 41 stringutil,
42 42 )
43 43
44 44
45 45 if pycompat.TYPE_CHECKING:
46 46 from typing import (
47 47 Any,
48 48 Optional,
49 49 Tuple,
50 50 )
51 51
52 52 for t in (Any, Optional, Tuple):
53 53 assert t
54 54
55 55
def getlimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get(b'limit')
    if not limit:
        # option absent or empty: no limit
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_(b'limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_(b'limit must be positive'))
    return limit
69 69
70 70
def diffordiffstat(
    ui,
    repo,
    diffopts,
    node1,
    node2,
    match,
    changes=None,
    stat=False,
    fp=None,
    graphwidth=0,
    prefix=b'',
    root=b'',
    listsubrepos=False,
    hunksfilterfn=None,
):
    '''show diff or diffstat.

    Writes the diff of node1..node2 (restricted by match) to fp or ui.
    With stat=True a diffstat is emitted instead; graphwidth narrows the
    available terminal width accordingly. prefix/root rewrite displayed
    paths, and subrepo diffs are appended per listsubrepos/match.
    '''
    ctx1 = repo[node1]
    ctx2 = repo[node2]
    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = b''
    copysourcematch = None

    def compose(f, g):
        # simple function composition used to stack path rewriters
        return lambda x: f(g(x))

    def pathfn(f):
        return posixpath.join(prefix, f)

    if relroot != b'':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        uirelroot = uipathfn(pathfn(relroot))
        relroot += b'/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(
                    _(b'warning: %s not inside relative root %s\n')
                    % (uipathfn(pathfn(matchroot)), uirelroot)
                )

        relrootmatch = scmutil.match(ctx2, pats=[relroot], default=b'path')
        match = matchmod.intersectmatchers(match, relrootmatch)
        copysourcematch = relrootmatch

        checkroot = repo.ui.configbool(
            b'devel', b'all-warnings'
        ) or repo.ui.configbool(b'devel', b'check-relroot')

        def relrootpathfn(f):
            # strip relroot from displayed paths (sanity-checked in dev mode)
            if checkroot and not f.startswith(relroot):
                raise AssertionError(
                    b"file %s doesn't start with relroot %s" % (f, relroot)
                )
            return f[len(relroot) :]

        pathfn = compose(relrootpathfn, pathfn)

    if stat:
        diffopts = diffopts.copy(context=0, noprefix=False)
        width = 80
        if not ui.plain():
            width = ui.termwidth() - graphwidth
    # If an explicit --root was given, don't respect ui.relative-paths
    if not relroot:
        pathfn = compose(scmutil.getuipathfn(repo), pathfn)

    chunks = ctx2.diff(
        ctx1,
        match,
        changes,
        opts=diffopts,
        pathfn=pathfn,
        copysourcematch=copysourcematch,
        hunksfilterfn=hunksfilterfn,
    )

    if fp is not None or ui.canwritewithoutlabels():
        # fast path: raw, unlabeled output
        out = fp or ui
        if stat:
            chunks = [patch.diffstat(util.iterlines(chunks), width=width)]
        for chunk in util.filechunkiter(util.chunkbuffer(chunks)):
            out.write(chunk)
    else:
        if stat:
            chunks = patch.diffstatui(util.iterlines(chunks), width=width)
        else:
            chunks = patch.difflabel(
                lambda chunks, **kwargs: chunks, chunks, opts=diffopts
            )
        if ui.canbatchlabeledwrites():

            def gen():
                for chunk, label in chunks:
                    yield ui.label(chunk, label=label)

            for chunk in util.filechunkiter(util.chunkbuffer(gen())):
                ui.write(chunk)
        else:
            for chunk, label in chunks:
                ui.write(chunk, label=label)

    for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
        tempnode2 = node2
        try:
            if node2 is not None:
                tempnode2 = ctx2.substate[subpath][1]
        except KeyError:
            # A subrepo that existed in node1 was deleted between node1 and
            # node2 (inclusive). Thus, ctx2's substate won't contain that
            # subpath. The best we can do is to ignore it.
            tempnode2 = None
        submatch = matchmod.subdirmatcher(subpath, match)
        subprefix = repo.wvfs.reljoin(prefix, subpath)
        if listsubrepos or match.exact(subpath) or any(submatch.files()):
            sub.diff(
                ui,
                diffopts,
                tempnode2,
                submatch,
                changes=changes,
                stat=stat,
                fp=fp,
                prefix=subprefix,
            )
199 199
200 200
class changesetdiffer(object):
    """Generate diff of changeset with pre-configured filtering functions"""

    def _makefilematcher(self, ctx):
        # default: include every file; subclasses may narrow this
        return scmutil.matchall(ctx.repo())

    def _makehunksfilter(self, ctx):
        # default: no hunk-level filtering
        return None

    def showdiff(self, ui, ctx, diffopts, graphwidth=0, stat=False):
        """Write ctx's diff against its first parent (diffstat if stat)."""
        repo = ctx.repo()
        node = ctx.node()
        prev = ctx.p1().node()
        diffordiffstat(
            ui,
            repo,
            diffopts,
            prev,
            node,
            match=self._makefilematcher(ctx),
            stat=stat,
            graphwidth=graphwidth,
            hunksfilterfn=self._makehunksfilter(ctx),
        )
225 225
226 226
def changesetlabels(ctx):
    """Return space-joined color labels for *ctx*'s changeset line."""
    labels = [b'log.changeset', b'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append(b'changeset.obsolete')
    if ctx.isunstable():
        labels.append(b'changeset.unstable')
        labels.extend(
            b'instability.%s' % instability
            for instability in ctx.instabilities()
        )
    return b' '.join(labels)
236 236
237 237
class changesetprinter(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self._differ = differ or changesetdiffer()
        self._diffopts = patch.diffallopts(ui, diffopts)
        self._includestat = diffopts and diffopts.get(b'stat')
        self._includediff = diffopts and diffopts.get(b'patch')
        self.header = {}  # rev -> buffered header text
        self.hunk = {}  # rev -> buffered changeset text (buffered mode)
        self.lastheader = None
        self.footer = None
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        """Write any buffered header/hunk for ctx and drop the buffers."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # suppress repeated identical headers
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, **props):
        """Render ctx, either directly or into self.hunk when buffered."""
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, props)

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        graphwidth = props.get(b'graphwidth', 0)

        if self.ui.quiet:
            # quiet mode: just the changeset id
            self.ui.write(
                b"%s\n" % scmutil.formatchangeid(ctx), label=b'log.node'
            )
            return

        columns = self._columns
        self.ui.write(
            columns[b'changeset'] % scmutil.formatchangeid(ctx),
            label=changesetlabels(ctx),
        )

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != b'default':
            self.ui.write(columns[b'branch'] % branch, label=b'log.branch')

        for nsname, ns in pycompat.iteritems(self.repo.names):
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == b'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name, label=b'log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(
                columns[b'phase'] % ctx.phasestr(), label=b'log.phase'
            )
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = b'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(
                columns[b'parent'] % scmutil.formatchangeid(pctx), label=label
            )

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            if mnode is None:
                # working-directory context has no manifest node yet
                mnode = wdirid
                mrev = wdirrev
            else:
                mrev = self.repo.manifestlog.rev(mnode)
            self.ui.write(
                columns[b'manifest']
                % scmutil.formatrevnode(self.ui, mrev, mnode),
                label=b'ui.debug log.manifest',
            )
        self.ui.write(columns[b'user'] % ctx.user(), label=b'log.user')
        self.ui.write(
            columns[b'date'] % dateutil.datestr(ctx.date()), label=b'log.date'
        )

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(
                columns[b'instability'] % b', '.join(instabilities),
                label=b'log.instability',
            )

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            files = ctx.p1().status(ctx)
            for key, value in zip(
                [b'files', b'files+', b'files-'],
                [files.modified, files.added, files.removed],
            ):
                if value:
                    self.ui.write(
                        columns[key] % b" ".join(value),
                        label=b'ui.debug log.files',
                    )
        elif ctx.files() and self.ui.verbose:
            self.ui.write(
                columns[b'files'] % b" ".join(ctx.files()),
                label=b'ui.note log.files',
            )
        if copies and self.ui.verbose:
            copies = [b'%s (%s)' % c for c in copies]
            self.ui.write(
                columns[b'copies'] % b' '.join(copies),
                label=b'ui.note log.copies',
            )

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(
                    columns[b'extra'] % (key, stringutil.escapestr(value)),
                    label=b'ui.debug log.extra',
                )

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                # verbose: full description under a header
                self.ui.write(
                    _(b"description:\n"), label=b'ui.note log.description'
                )
                self.ui.write(description, label=b'ui.note log.description')
                self.ui.write(b"\n\n")
            else:
                # normal: first line only
                self.ui.write(
                    columns[b'summary'] % description.splitlines()[0],
                    label=b'log.summary',
                )
        self.ui.write(b"\n")

        self._showpatch(ctx, graphwidth)

    def _showobsfate(self, ctx):
        # TODO: do not depend on templater
        tres = formatter.templateresources(self.repo.ui, self.repo)
        t = formatter.maketemplater(
            self.repo.ui,
            b'{join(obsfate, "\n")}',
            defaults=templatekw.keywords,
            resources=tres,
        )
        obsfate = t.renderdefault({b'ctx': ctx}).splitlines()

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(
                    self._columns[b'obsolete'] % obsfateline,
                    label=b'log.obsfate',
                )

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def _showpatch(self, ctx, graphwidth=0):
        """Emit diffstat and/or diff per the configured diffopts."""
        if self._includestat:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=True
            )
        if self._includestat and self._includediff:
            self.ui.write(b"\n")
        if self._includediff:
            self._differ.showdiff(
                self.ui, ctx, self._diffopts, graphwidth, stat=False
            )
        if self._includestat or self._includediff:
            self.ui.write(b"\n")
435 435
436 436
class changesetformatter(changesetprinter):
    """Format changeset information by generic formatter"""

    def __init__(
        self, ui, repo, fm, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
        self._fm = fm

    def close(self):
        self._fm.end()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        fm = self._fm
        fm.startitem()
        fm.context(ctx=ctx)
        fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))

        # datahint lists the extra fields the output format wants
        datahint = fm.datahint()
        if self.ui.quiet and not datahint:
            return

        fm.data(
            branch=ctx.branch(),
            phase=ctx.phasestr(),
            user=ctx.user(),
            date=fm.formatdate(ctx.date()),
            desc=ctx.description(),
            bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'),
            tags=fm.formatlist(ctx.tags(), name=b'tag'),
            parents=fm.formatlist(
                [fm.hexfunc(c.node()) for c in ctx.parents()], name=b'node'
            ),
        )

        if self.ui.debugflag or b'manifest' in datahint:
            fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid))
        if self.ui.debugflag or b'extra' in datahint:
            fm.data(extra=fm.formatdict(ctx.extra()))

        if (
            self.ui.debugflag
            or b'modified' in datahint
            or b'added' in datahint
            or b'removed' in datahint
        ):
            files = ctx.p1().status(ctx)
            fm.data(
                modified=fm.formatlist(files.modified, name=b'file'),
                added=fm.formatlist(files.added, name=b'file'),
                removed=fm.formatlist(files.removed, name=b'file'),
            )

        verbose = not self.ui.debugflag and self.ui.verbose
        if verbose or b'files' in datahint:
            fm.data(files=fm.formatlist(ctx.files(), name=b'file'))
        if verbose and copies or b'copies' in datahint:
            fm.data(
                copies=fm.formatdict(copies or {}, key=b'name', value=b'source')
            )

        # render diffstat/diff through a buffer so they become plain data
        if self._includestat or b'diffstat' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=True)
            fm.data(diffstat=self.ui.popbuffer())
        if self._includediff or b'diff' in datahint:
            self.ui.pushbuffer()
            self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
            fm.data(diff=self.ui.popbuffer())
508 508
509 509
class changesettemplater(changesetprinter):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changesettemplater for common cases. See functions such as:
    maketemplater, changesetdisplayer, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(
        self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
    ):
        changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
        # tres is shared with _graphnodeformatter()
        self._tresources = tres = formatter.templateresources(ui, repo)
        self.t = formatter.loadtemplater(
            ui,
            tmplspec,
            defaults=templatekw.keywords,
            resources=tres,
            cache=templatekw.defaulttempl,
        )
        # per-displayer counter backing the {index} keyword and the
        # separator logic in _show()
        self._counter = itertools.count()

        self._tref = tmplspec.ref
        # map of part name -> name of the template to render for it;
        # empty string means "no such part"
        self._parts = {
            b'header': b'',
            b'footer': b'',
            tmplspec.ref: tmplspec.ref,
            b'docheader': b'',
            b'docfooter': b'',
            b'separator': b'',
        }
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, b''),
                (self.ui.verbose, b'_verbose'),
                (self.ui.quiet, b'_quiet'),
                (self.ui.debugflag, b'_debug'),
            ]
            # later (more specific) modes override earlier ones
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        # docheader is written once, up front
        if self._parts[b'docheader']:
            self.ui.write(self.t.render(self._parts[b'docheader'], {}))

    def close(self):
        # docfooter is appended to the accumulated footer before the
        # parent class flushes it
        if self._parts[b'docfooter']:
            if not self.footer:
                self.footer = b""
            self.footer += self.t.render(self._parts[b'docfooter'], {})
        return super(changesettemplater, self).close()

    def _show(self, ctx, copies, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props[b'ctx'] = ctx
        props[b'index'] = index = next(self._counter)
        props[b'revcache'] = {b'copies': copies}
        graphwidth = props.get(b'graphwidth', 0)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts[b'separator'] and index > 0:
            self.ui.write(self.t.render(self._parts[b'separator'], {}))

        # write header
        if self._parts[b'header']:
            h = self.t.render(self._parts[b'header'], props)
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only emit the header when it changed since the last item
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(self.t.render(key, props))
        self._showpatch(ctx, graphwidth)

        # footer is rendered once, from the first item's props
        if self._parts[b'footer']:
            if not self.footer:
                self.footer = self.t.render(self._parts[b'footer'], props)
606 606
607 607
def templatespec(tmpl, mapfile):
    """Build a changeset formatter.templatespec from a literal template
    byte-string or a style map file (exactly one of the two)."""
    if pycompat.ispy3:
        # templates are byte-strings throughout Mercurial
        assert not isinstance(tmpl, str), b'tmpl must not be a str'
    if not mapfile:
        return formatter.templatespec(b'', tmpl, None)
    return formatter.templatespec(b'changeset', tmpl, mapfile)
615 615
616 616
def _lookuptemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # explicit template/style beat the [ui] configuration
    if not tmpl and not style:
        tmpl = ui.config(b'ui', b'logtemplate')
        if tmpl:
            return templatespec(templater.unquotestring(tmpl), None)
        style = util.expandpath(ui.config(b'ui', b'style'))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            # bare name: try the shipped "map-cmdline.<name>" styles first,
            # then a plain template path lookup
            found = templater.templatepath(
                b'map-cmdline.' + mapfile
            ) or templater.templatepath(mapfile)
            if found:
                mapfile = found
        return templatespec(None, mapfile)

    return formatter.lookuptemplate(ui, b'changeset', tmpl)
642 642
643 643
def maketemplater(ui, repo, tmpl, buffered=False):
    """Create a changesettemplater from a literal template 'tmpl'
    byte-string."""
    spec = templatespec(tmpl, None)
    templ = changesettemplater(ui, repo, spec, buffered=buffered)
    return templ
649 649
650 650
def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changesetprinter() is done.
    """
    common = (differ, opts, buffered)
    spec = _lookuptemplate(ui, opts.get(b'template'), opts.get(b'style'))

    # machine-readable formats have slightly different keyword set than
    # plain templates, which are handled by changesetformatter.
    # note that {b'pickle', b'debug'} can also be added to the list if needed.
    if spec.ref in {b'cbor', b'json'}:
        fm = ui.formatter(b'log', opts)
        return changesetformatter(ui, repo, fm, *common)

    if not spec.ref and not spec.tmpl and not spec.mapfile:
        return changesetprinter(ui, repo, *common)

    return changesettemplater(ui, repo, spec, *common)
676 676
677 677
def _makematcher(repo, revs, pats, opts):
    """Build matcher and expanded patterns from log options

    If --follow, revs are the revisions to follow from.

    Returns (match, pats, slowpath) where
    - match: a matcher built from the given pats and -I/-X opts
    - pats: patterns used (globs are expanded on Windows)
    - slowpath: True if patterns aren't as simple as scanning filelogs
    """
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    # --removed needs changelog scanning; any non-literal pattern does too
    slowpath = match.anypats() or (not match.always() and opts.get(b'removed'))
    if not slowpath:
        follow = opts.get(b'follow') or opts.get(b'follow_first')
        startctxs = []
        if follow and opts.get(b'rev'):
            startctxs = [repo[r] for r in revs]
        for f in match.files():
            if follow and startctxs:
                # No idea if the path was a directory at that revision, so
                # take the slow path.
                if any(f not in c for c in startctxs):
                    slowpath = True
                    continue
            elif follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(
                        _(
                            b'cannot follow file not in parent '
                            b'revision: "%s"'
                        )
                        % f
                    )
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _(b'cannot follow nonexistent file: "%s"') % f
                    )
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == b'.' or path in repo.store:
                    break
            else:
                slowpath = False

    return match, pats, slowpath
743 743
744 744
def _fileancestors(repo, revs, match, followfirst):
    """Walk file ancestry of the matched files starting from revs.

    Returns (revset, filematcher) where revset lazily yields ancestor
    revisions in descending order and filematcher(ctx) matches the files
    of ctx that belong to the followed history.
    """
    introctxs = []
    for rev in revs:
        ctx = repo[rev]
        introctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match))

    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "pathsbyrev". It is populated as a side
    # effect of the graph traversal in revgen().
    pathsbyrev = {}

    def filematcher(ctx):
        return scmutil.matchfiles(repo, pathsbyrev.get(ctx.rev(), []))

    def revgen():
        walker = dagop.filectxancestors(introctxs, followfirst=followfirst)
        for rev, fctxs in walker:
            pathsbyrev[rev] = [fctx.path() for fctx in fctxs]
            yield rev

    return smartset.generatorset(revgen(), iterasc=False), filematcher
767 767
768 768
769 769 def _makenofollowfilematcher(repo, pats, opts):
770 770 '''hook for extensions to override the filematcher for non-follow cases'''
771 771 return None
772 772
773 773
# Map of log command option name -> (revset template for one value,
# revset template combining a list of values). Either element may be
# None; see _makerevset() for how the pair is interpreted.
_opt2logrevset = {
    b'no_merges': (b'not merge()', None),
    b'only_merges': (b'merge()', None),
    b'_matchfiles': (None, b'_matchfiles(%ps)'),
    b'date': (b'date(%s)', None),
    b'branch': (b'branch(%s)', b'%lr'),
    b'_patslog': (b'filelog(%s)', b'%lr'),
    b'keyword': (b'keyword(%s)', b'%lr'),
    b'prune': (b'ancestors(%s)', b'not %lr'),
    b'user': (b'user(%s)', b'%lr'),
}
785 785
786 786
def _makerevset(repo, match, pats, slowpath, opts):
    """Return a revset string built from log options and file patterns,
    or None when no filtering is needed."""
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get(b'follow') or opts.get(b'follow_first')

    # 'branch' and 'only_branch' are really aliases and must be handled
    # at the same time
    branches = opts.get(b'branch', []) + opts.get(b'only_branch', [])
    opts[b'branch'] = [repo.lookupbranch(b) for b in branches]

    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X b" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = [b'r:', b'd:relpath']
        matchargs.extend(b'p:' + p for p in pats)
        matchargs.extend(b'i:' + p for p in opts.get(b'include', []))
        matchargs.extend(b'x:' + p for p in opts.get(b'exclude', []))
        opts[b'_matchfiles'] = matchargs
    elif not follow:
        opts[b'_patslog'] = list(pats)

    terms = []
    # sorted for deterministic expression order
    for op, val in sorted(pycompat.iteritems(opts)):
        if not val:
            continue
        if op not in _opt2logrevset:
            continue
        revop, listop = _opt2logrevset[op]
        if revop and b'%' not in revop:
            # fixed expression with no value interpolation
            terms.append(revop)
        elif not listop:
            terms.append(revsetlang.formatspec(revop, val))
        else:
            if revop:
                val = [revsetlang.formatspec(revop, v) for v in val]
            terms.append(revsetlang.formatspec(listop, val))

    if not terms:
        return None
    return b'(' + b' and '.join(terms) + b')'
839 839
840 840
def _initialrevs(repo, opts):
    """Return the initial set of revisions to be filtered or followed"""
    if opts.get(b'rev'):
        revs = scmutil.revrange(repo, opts[b'rev'])
    elif opts.get(b'follow') or opts.get(b'follow_first'):
        if repo.dirstate.p1() == nullid:
            # nothing checked out: nothing to follow from
            revs = smartset.baseset()
        else:
            revs = repo.revs(b'.')
    else:
        revs = smartset.spanset(repo)
    revs.reverse()
    return revs
854 854
855 855
def getrevs(repo, pats, opts):
    # type: (Any, Any, Any) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
    """Return (revs, differ) where revs is a smartset

    differ is a changesetdiffer with pre-configured file matcher.
    """
    follow = opts.get(b'follow') or opts.get(b'follow_first')
    followfirst = opts.get(b'follow_first')
    limit = getlimit(opts)
    revs = _initialrevs(repo, opts)
    if not revs:
        # nothing to display; no differ needed
        return smartset.baseset(), None
    match, pats, slowpath = _makematcher(repo, revs, pats, opts)
    filematcher = None
    if follow:
        if slowpath or match.always():
            # follow whole-changeset ancestry
            revs = dagop.revancestors(repo, revs, followfirst=followfirst)
        else:
            # follow per-file ancestry; also yields a matcher limited to
            # the followed files
            revs, filematcher = _fileancestors(repo, revs, match, followfirst)
        revs.reverse()
    if filematcher is None:
        filematcher = _makenofollowfilematcher(repo, pats, opts)
    if filematcher is None:

        def filematcher(ctx):
            return match

    expr = _makerevset(repo, match, pats, slowpath, opts)
    if opts.get(b'graph'):
        # User-specified revs might be unsorted, but don't sort before
        # _makerevset because it might depend on the order of revs
        if repo.ui.configbool(b'experimental', b'log.topo'):
            if not revs.istopo():
                revs = dagop.toposort(revs, repo.changelog.parentrevs)
                # TODO: try to iterate the set lazily
                revs = revset.baseset(list(revs), istopo=True)
        elif not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(None, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # --limit applies after all filtering
        revs = revs.slice(0, limit)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    return revs, differ
903 903
904 904
def _parselinerangeopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).
    """
    parsed = []
    for pat in opts.get(b'line_range', []):
        try:
            pat, linespec = pat.rsplit(b',', 1)
        except ValueError:
            raise error.Abort(_(b'malformatted line-range pattern %s') % pat)
        try:
            fromline, toline = map(int, linespec.split(b':'))
        except ValueError:
            raise error.Abort(_(b"invalid line range for %s") % pat)
        msg = _(b"line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        parsed.append((fname, util.processlinerange(fromline, toline)))
    return parsed
925 925
926 926
def getlinerangerevs(repo, userrevs, opts):
    """Return (revs, differ).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "differ" is a changesetdiffer with pre-configured file matcher and hunks
    filter.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(
                _(b'cannot follow file not in parent revision: "%s"') % fname
            )
        fctx = wctx.filectx(fname)
        # walk ancestors of the requested block, keeping only revisions
        # the user asked for
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(rev, {}).setdefault(
                fctx.path(), []
            ).append(linerange)

    def nofilterhunksfn(fctx, hunks):
        # pass-through filter for revisions with no recorded line ranges
        return hunks

    def hunksfilter(ctx):
        # build a per-changeset hunk filter keeping only hunks that
        # intersect the followed line ranges
        fctxlineranges = linerangesbyrev.get(ctx.rev())
        if fctxlineranges is None:
            return nofilterhunksfn

        def filterfn(fctx, hunks):
            lineranges = fctxlineranges.get(fctx.path())
            if lineranges is not None:
                for hr, lines in hunks:
                    if hr is None:  # binary
                        yield hr, lines
                        continue
                    if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
                        yield hr, lines
            else:
                for hunk in hunks:
                    yield hunk

        return filterfn

    def filematcher(ctx):
        # only the files with followed line ranges at this revision
        files = list(linerangesbyrev.get(ctx.rev(), []))
        return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    differ = changesetdiffer()
    differ._makefilematcher = filematcher
    differ._makehunksfilter = hunksfilter
    return smartset.baseset(revs), differ
987 987
988 988
def _graphnodeformatter(ui, displayer):
    """Return a (repo, ctx) -> bytes function producing the graph node
    character, honoring the ui.graphnodetemplate config."""
    spec = ui.config(b'ui', b'graphnodetemplate')
    if not spec:
        # fast path for "{graphnode}"
        return templatekw.getgraphnode

    spec = templater.unquotestring(spec)
    if isinstance(displayer, changesettemplater):
        # reuse cache of slow templates
        resources = displayer._tresources
    else:
        resources = formatter.templateresources(ui)
    templ = formatter.maketemplater(
        ui, spec, defaults=templatekw.keywords, resources=resources
    )

    def formatnode(repo, ctx):
        return templ.renderdefault({b'ctx': ctx, b'repo': repo})

    return formatnode
1009 1009
1010 1010
def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
    """Render the revisions of ``dag`` as an ascii graph.

    The SOURCE text retained both sides of a diff here (the removed
    ``state[b'styles']``/``state[b'graphshorten']`` dict accesses AND the
    added ``state.styles``/``state.graphshorten`` attribute accesses);
    this body resolves to the attr-struct form that graphmod.asciistate()
    now returns — the dict-subscript lines are dropped.

    ``dag`` yields (rev, type, ctx, parents); ``edgefn`` computes edge
    data; ``getcopies``, when given, maps ctx -> copies dict; ``props``
    are extra template properties.
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state.styles

    # only set graph styling if HGPLAIN is not set.
    if ui.plain(b'graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, b'|'))
    else:
        edgetypes = {
            b'parent': graphmod.PARENT,
            b'grandparent': graphmod.GRANDPARENT,
            b'missing': graphmod.MISSINGPARENT,
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config(
                b'experimental', b'graphstyle.%s' % name, styles[key]
            )
            if not styles[key]:
                # empty config means "draw no line" for this edge type
                styles[key] = None

    # experimental config: experimental.graphshorten
    state.graphshorten = ui.configbool(b'experimental', b'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = getcopies(ctx) if getcopies else None
        edges = edgefn(type, char, state, rev, parents)
        # the first edge carries the graph width used to wrap the text
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(
            ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
        )
        # the displayer buffered its output in hunk[rev]; interleave it
        # with the graph drawing, dropping a trailing empty line
        lines = displayer.hunk.pop(rev).split(b'\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            lines = []
    displayer.close()
1055 1055
1056 1056
def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
    """Convenience wrapper: walk revs as a DAG and render it with the
    default ascii edge function."""
    dag = graphmod.dagwalker(repo, revs)
    displaygraph(ui, repo, dag, displayer, graphmod.asciiedges, getrenamed)
1060 1060
1061 1061
def displayrevs(ui, repo, revs, displayer, getcopies):
    """Show each revision in revs through displayer, flushing after each
    changeset, then close the displayer."""
    for rev in revs:
        ctx = repo[rev]
        displayer.show(ctx, copies=getcopies(ctx) if getcopies else None)
        displayer.flush(ctx)
    displayer.close()
1069 1069
1070 1070
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph is enabled."""
    for name in (b"newest_first",):
        if opts.get(name):
            raise error.Abort(
                _(b"-G/--graph option is incompatible with --%s")
                % name.replace(b"_", b"-")
            )
1078 1078
1079 1079
def graphrevs(repo, nodes, opts):
    """Reverse nodes in place, apply the --limit option, and return a DAG
    walker over the result."""
    nodes.reverse()
    limit = getlimit(opts)
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)
General Comments 0
You need to be logged in to leave comments. Login now