absorb: migrate `opts` to native kwargs
Matt Harbison
r51762:e0cae2b4 default
@@ -1,1165 +1,1162 b''
1 1 # absorb.py
2 2 #
3 3 # Copyright 2016 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """apply working directory changes to changesets (EXPERIMENTAL)
9 9
10 10 The absorb extension provides a command to use annotate information to
11 11 amend modified chunks into the corresponding non-public changesets.
12 12
13 13 ::
14 14
15 15 [absorb]
16 16 # only check 50 recent non-public changesets at most
17 17 max-stack-size = 50
18 18 # whether to add noise to new commits to avoid obsolescence cycle
19 19 add-noise = 1
20 20 # make `amend --correlated` a shortcut to the main command
21 21 amend-flag = correlated
22 22
23 23 [color]
24 24 absorb.description = yellow
25 25 absorb.node = blue bold
26 26 absorb.path = bold
27 27 """
28 28
29 29 # TODO:
30 30 # * Rename config items to [commands] namespace
31 31 # * Converge getdraftstack() with other code in core
32 32 # * move many attributes on fixupstate to be private
33 33
34 34
35 35 import collections
36 36
37 37 from mercurial.i18n import _
38 38 from mercurial.node import (
39 39 hex,
40 40 short,
41 41 )
42 42 from mercurial import (
43 43 cmdutil,
44 44 commands,
45 45 context,
46 46 crecord,
47 47 error,
48 48 linelog,
49 49 mdiff,
50 50 obsolete,
51 51 patch,
52 52 phases,
53 53 pycompat,
54 54 registrar,
55 55 rewriteutil,
56 56 scmutil,
57 57 util,
58 58 )
59 59 from mercurial.utils import stringutil
60 60
61 61 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
62 62 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
63 63 # be specifying the version(s) of Mercurial they are tested with, or
64 64 # leave the attribute unspecified.
65 65 testedwith = b'ships-with-hg-core'
66 66
67 67 cmdtable = {}
68 68 command = registrar.command(cmdtable)
69 69
70 70 configtable = {}
71 71 configitem = registrar.configitem(configtable)
72 72
73 73 configitem(b'absorb', b'add-noise', default=True)
74 74 configitem(b'absorb', b'amend-flag', default=None)
75 75 configitem(b'absorb', b'max-stack-size', default=50)
76 76
77 77 colortable = {
78 78 b'absorb.description': b'yellow',
79 79 b'absorb.node': b'blue bold',
80 80 b'absorb.path': b'bold',
81 81 }
82 82
83 83 defaultdict = collections.defaultdict
84 84
85 85
86 86 class nullui:
87 87 """blank ui object doing nothing"""
88 88
89 89 debugflag = False
90 90 verbose = False
91 91 quiet = True
92 92
93 93 def __getitem__(self, name):
94 94 def nullfunc(*args, **kwds):
95 95 return
96 96
97 97 return nullfunc
98 98
99 99
100 100 class emptyfilecontext:
101 101 """minimal filecontext representing an empty file"""
102 102
103 103 def __init__(self, repo):
104 104 self._repo = repo
105 105
106 106 def data(self):
107 107 return b''
108 108
109 109 def node(self):
110 110 return self._repo.nullid
111 111
112 112
113 113 def uniq(lst):
114 114 """list -> list. remove duplicated items without changing the order"""
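# A tiny illustrative example (hypothetical input, not from the changeset):
#     uniq([3, 1, 3, 2, 1]) -> [3, 1, 2]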
115 115 seen = set()
116 116 result = []
117 117 for x in lst:
118 118 if x not in seen:
119 119 seen.add(x)
120 120 result.append(x)
121 121 return result
122 122
123 123
124 124 def getdraftstack(headctx, limit=None):
125 125 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
126 126
127 127 changesets are sorted in topo order, oldest first.
128 128 return at most limit items, if limit is a positive number.
129 129
130 130 merges are considered non-draft as well, i.e. every commit
131 131 returned has exactly 1 parent.
132 132 """
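# Illustrative example (hypothetical history): with a public base P and
# draft commits D1 <- D2 <- D3, getdraftstack(repo[D3_node], limit=2)
# walks back from D3 and returns the two most recent drafts, oldest
# first: [D2, D3].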
133 133 ctx = headctx
134 134 result = []
135 135 while ctx.phase() != phases.public:
136 136 if limit and len(result) >= limit:
137 137 break
138 138 parents = ctx.parents()
139 139 if len(parents) != 1:
140 140 break
141 141 result.append(ctx)
142 142 ctx = parents[0]
143 143 result.reverse()
144 144 return result
145 145
146 146
147 147 def getfilestack(stack, path, seenfctxs=None):
148 148 """([ctx], str, set) -> [fctx], {ctx: fctx}
149 149
150 150 stack is a list of contexts, from old to new. usually they are what
151 151 "getdraftstack" returns.
152 152
153 153 follows renames, but not copies.
154 154
155 155 seenfctxs is a set of filecontexts that will be considered "immutable".
156 156 they are usually what this function returned in earlier calls, and are
157 157 useful to avoid the issue where a file was "moved" to multiple places and
158 158 then modified differently. for example: "a" was copied to "b", "a" was also
159 159 copied to "c" and then "a" was deleted; both "b" and "c" were then "moved"
160 160 from "a", and we enforce that only one of them can affect "a"'s content.
161 161
162 162 return an empty list and an empty dict, if the specified path does not
163 163 exist in stack[-1] (the top of the stack).
164 164
165 165 otherwise, return a list of de-duplicated filecontexts, and the map to
166 166 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
167 167 of the list would be outside the stack and should be considered immutable.
168 168 the remaining items are within the stack.
169 169
170 170 for example, given the following changelog and corresponding filelog
171 171 revisions:
172 172
173 173 changelog: 3----4----5----6----7
174 174 filelog: x 0----1----1----2 (x: no such file yet)
175 175
176 176 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
177 177 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
178 178 dummy empty filecontext.
179 179 - if stack = [2], returns ([], {})
180 180 - if stack = [7], returns ([1, 2], {7: 2})
181 181 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
182 182 removed, since 1 is immutable.
183 183 """
184 184 if seenfctxs is None:
185 185 seenfctxs = set()
186 186 assert stack
187 187
188 188 if path not in stack[-1]:
189 189 return [], {}
190 190
191 191 fctxs = []
192 192 fctxmap = {}
193 193
194 194 pctx = stack[0].p1() # the public (immutable) ctx we stop at
195 195 for ctx in reversed(stack):
196 196 if path not in ctx: # the file is added in the next commit
197 197 pctx = ctx
198 198 break
199 199 fctx = ctx[path]
200 200 fctxs.append(fctx)
201 201 if fctx in seenfctxs: # treat fctx as the immutable one
202 202 pctx = None # do not add another immutable fctx
203 203 break
204 204 fctxmap[ctx] = fctx # only for mutable fctxs
205 205 copy = fctx.copysource()
206 206 if copy:
207 207 path = copy # follow rename
208 208 if path in ctx: # but do not follow copy
209 209 pctx = ctx.p1()
210 210 break
211 211
212 212 if pctx is not None: # need an extra immutable fctx
213 213 if path in pctx:
214 214 fctxs.append(pctx[path])
215 215 else:
216 216 fctxs.append(emptyfilecontext(pctx.repo()))
217 217
218 218 fctxs.reverse()
219 219 # note: we rely on a property of hg: filerev is not reused for linear
220 220 # history. i.e. it's impossible to have:
221 221 # changelog: 4----5----6 (linear, no merges)
222 222 # filelog: 1----2----1
223 223 # ^ reuse filerev (impossible)
224 224 # because parents are part of the hash. if that's not true, we need to
225 225 # remove uniq and find a different way to identify fctxs.
226 226 return uniq(fctxs), fctxmap
227 227
228 228
229 229 class overlaystore(patch.filestore):
230 230 """read-only, hybrid store based on a dict and ctx.
231 231 memworkingcopy: {path: content}, overrides file contents.
232 232 """
233 233
234 234 def __init__(self, basectx, memworkingcopy):
235 235 self.basectx = basectx
236 236 self.memworkingcopy = memworkingcopy
237 237
238 238 def getfile(self, path):
239 239 """comply with mercurial.patch.filestore.getfile"""
240 240 if path not in self.basectx:
241 241 return None, None, None
242 242 fctx = self.basectx[path]
243 243 if path in self.memworkingcopy:
244 244 content = self.memworkingcopy[path]
245 245 else:
246 246 content = fctx.data()
247 247 mode = (fctx.islink(), fctx.isexec())
248 248 copy = fctx.copysource()
249 249 return content, mode, copy
250 250
251 251
252 252 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None):
253 253 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
254 254 memworkingcopy overrides file contents.
255 255 """
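# A minimal usage sketch (hypothetical path and content): override one
# file on top of the working directory parent and build the commit in
# memory, keeping the original description, parents and date.
#
#     memctx = overlaycontext({b'a.txt': b'new content\n'}, repo[b'.'])
#     newnode = memctx.commit()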
256 256 # parents must contain 2 items: (node1, node2)
257 257 if parents is None:
258 258 parents = ctx.repo().changelog.parents(ctx.node())
259 259 if extra is None:
260 260 extra = ctx.extra()
261 261 if desc is None:
262 262 desc = ctx.description()
263 263 date = ctx.date()
264 264 user = ctx.user()
265 265 files = set(ctx.files()).union(memworkingcopy)
266 266 store = overlaystore(ctx, memworkingcopy)
267 267 return context.memctx(
268 268 repo=ctx.repo(),
269 269 parents=parents,
270 270 text=desc,
271 271 files=files,
272 272 filectxfn=store,
273 273 user=user,
274 274 date=date,
275 275 branch=None,
276 276 extra=extra,
277 277 )
278 278
279 279
280 280 class filefixupstate:
281 281 """state needed to apply fixups to a single file
282 282
283 283 internally, it keeps file contents of several revisions and a linelog.
284 284
285 285 the linelog uses odd revision numbers for original contents (fctxs passed
286 286 to __init__), and even revision numbers for fixups, like:
287 287
288 288 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
289 289 linelog rev 2: fixups made to self.fctxs[0]
290 290 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
291 291 linelog rev 4: fixups made to self.fctxs[1]
292 292 ...
293 293
294 294 a typical use is like:
295 295
296 296 1. call diffwith, to calculate self.fixups
297 297 2. (optionally), present self.fixups to the user, or change it
298 298 3. call apply, to apply changes
299 299 4. read results from "finalcontents", or call getfinalcontent
300 300 """
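# A minimal lifecycle sketch of the steps above (hypothetical fctxs,
# path and targetfctx):
#
#     fstate = filefixupstate(fctxs, path, ui=ui)
#     fstate.diffwith(targetfctx)                 # 1. compute fstate.fixups
#     fstate.apply()                              # 3. apply them via the linelog
#     final = fstate.getfinalcontent(fctxs[-1])   # 4. read the result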
301 301
302 def __init__(self, fctxs, path, ui=None, opts=None):
302 def __init__(self, fctxs, path, ui=None, **opts):
303 303 """([fctx], ui or None) -> None
304 304
305 305 fctxs should be linear, and sorted by topo order - oldest first.
306 306 fctxs[0] will be considered as "immutable" and will not be changed.
307 307 """
308 308 self.fctxs = fctxs
309 309 self.path = path
310 310 self.ui = ui or nullui()
311 self.opts = opts or {}
311 self.opts = opts
312 312
313 313 # following fields are built from fctxs. they exist for perf reason
314 314 self.contents = [f.data() for f in fctxs]
315 315 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
316 316 self.linelog = self._buildlinelog()
317 317 if self.ui.debugflag:
318 318 assert self._checkoutlinelog() == self.contents
319 319
320 320 # following fields will be filled later
321 321 self.chunkstats = [0, 0] # [adopted, total : int]
322 322 self.targetlines = [] # [str]
323 323 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
324 324 self.finalcontents = [] # [str]
325 325 self.ctxaffected = set()
326 326
327 327 def diffwith(self, targetfctx, fm=None):
328 328 """calculate fixups needed by examining the differences between
329 329 self.fctxs[-1] and targetfctx, chunk by chunk.
330 330
331 331 targetfctx is the target state we move towards. we may or may not be
332 332 able to get there because not all modified chunks can be amended into
333 333 a non-public fctx unambiguously.
334 334
335 335 call this only once, before apply().
336 336
337 337 update self.fixups, self.chunkstats, and self.targetlines.
338 338 """
339 339 a = self.contents[-1]
340 340 alines = self.contentlines[-1]
341 341 b = targetfctx.data()
342 342 blines = mdiff.splitnewlines(b)
343 343 self.targetlines = blines
344 344
345 345 self.linelog.annotate(self.linelog.maxrev)
346 346 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
347 347 assert len(annotated) == len(alines)
348 348 # add a dummy end line to make insertion at the end easier
349 349 if annotated:
350 350 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
351 351 annotated.append(dummyendline)
352 352
353 353 # analyse diff blocks
354 354 for chunk in self._alldiffchunks(a, b, alines, blines):
355 355 newfixups = self._analysediffchunk(chunk, annotated)
356 356 self.chunkstats[0] += bool(newfixups) # 1 or 0
357 357 self.chunkstats[1] += 1
358 358 self.fixups += newfixups
359 359 if fm is not None:
360 360 self._showchanges(fm, alines, blines, chunk, newfixups)
361 361
362 362 def apply(self):
363 363 """apply self.fixups. update self.linelog, self.finalcontents.
364 364
365 365 call this only once, before getfinalcontent(), after diffwith().
366 366 """
367 367 # the following is unnecessary, as it's done by "diffwith":
368 368 # self.linelog.annotate(self.linelog.maxrev)
369 369 for rev, a1, a2, b1, b2 in reversed(self.fixups):
370 370 blines = self.targetlines[b1:b2]
371 371 if self.ui.debugflag:
372 372 idx = (max(rev - 1, 0)) // 2
373 373 self.ui.write(
374 374 _(b'%s: chunk %d:%d -> %d lines\n')
375 375 % (short(self.fctxs[idx].node()), a1, a2, len(blines))
376 376 )
377 377 self.linelog.replacelines(rev, a1, a2, b1, b2)
378 if self.opts.get(b'edit_lines', False):
378 if self.opts.get('edit_lines', False):
379 379 self.finalcontents = self._checkoutlinelogwithedits()
380 380 else:
381 381 self.finalcontents = self._checkoutlinelog()
382 382
383 383 def getfinalcontent(self, fctx):
384 384 """(fctx) -> str. get modified file content for a given filecontext"""
385 385 idx = self.fctxs.index(fctx)
386 386 return self.finalcontents[idx]
387 387
388 388 def _analysediffchunk(self, chunk, annotated):
389 389 """analyse a diff chunk and return new fixups found
390 390
391 391 return [] if no lines from the chunk can be safely applied.
392 392
393 393 the chunk (or lines) cannot be safely applied if, for example:
394 394 - the modified (deleted) lines belong to a public changeset
395 395 (self.fctxs[0])
396 396 - the chunk is a pure insertion and the adjacent lines (at most 2
397 397 lines) belong to different non-public changesets, or do not belong
398 398 to any non-public changesets.
399 399 - the chunk is modifying lines from different changesets.
400 400 in this case, if the number of lines deleted equals the number
401 401 of lines added, assume it's a simple 1:1 map (could be wrong).
402 402 otherwise, give up.
403 403 - the chunk is modifying lines from a single non-public changeset,
404 404 but other revisions touch the area as well. i.e. the lines are
405 405 not continuous as seen from the linelog.
406 406 """
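# Worked example (hypothetical annotate data): if every deleted line in
# a1..a2 is annotated to linelog rev 3 (i.e. self.fctxs[1]) and the region
# is continuous, the whole chunk becomes a single fixup targeting the even
# "fixup" rev 4 (= 3 + 1): [(4, a1, a2, b1, b2)].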
407 407 a1, a2, b1, b2 = chunk
408 408 # find involved indexes from annotate result
409 409 involved = annotated[a1:a2]
410 410 if not involved and annotated: # a1 == a2 and a is not empty
411 411 # pure insertion, check nearby lines. ignore lines belonging
412 412 # to the public (first) changeset (i.e. annotated[i][0] == 1)
413 413 nearbylinenums = {a2, max(0, a1 - 1)}
414 414 involved = [
415 415 annotated[i] for i in nearbylinenums if annotated[i][0] != 1
416 416 ]
417 417 involvedrevs = list({r for r, l in involved})
418 418 newfixups = []
419 419 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
420 420 # chunk belongs to a single revision
421 421 rev = involvedrevs[0]
422 422 if rev > 1:
423 423 fixuprev = rev + 1
424 424 newfixups.append((fixuprev, a1, a2, b1, b2))
425 425 elif a2 - a1 == b2 - b1 or b1 == b2:
426 426 # 1:1 line mapping, or chunk was deleted
427 427 for i in range(a1, a2):
428 428 rev, linenum = annotated[i]
429 429 if rev > 1:
430 430 if b1 == b2: # deletion, simply remove that single line
431 431 nb1 = nb2 = 0
432 432 else: # 1:1 line mapping, change the corresponding rev
433 433 nb1 = b1 + i - a1
434 434 nb2 = nb1 + 1
435 435 fixuprev = rev + 1
436 436 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
437 437 return self._optimizefixups(newfixups)
438 438
439 439 @staticmethod
440 440 def _alldiffchunks(a, b, alines, blines):
441 441 """like mdiff.allblocks, but only care about differences"""
442 442 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
443 443 for chunk, btype in blocks:
444 444 if btype != b'!':
445 445 continue
446 446 yield chunk
447 447
448 448 def _buildlinelog(self):
449 449 """calculate the initial linelog based on self.content{,line}s.
450 450 this is similar to running a partial "annotate".
451 451 """
452 452 llog = linelog.linelog()
453 453 a, alines = b'', []
454 454 for i in range(len(self.contents)):
455 455 b, blines = self.contents[i], self.contentlines[i]
456 456 llrev = i * 2 + 1
457 457 chunks = self._alldiffchunks(a, b, alines, blines)
458 458 for a1, a2, b1, b2 in reversed(list(chunks)):
459 459 llog.replacelines(llrev, a1, a2, b1, b2)
460 460 a, alines = b, blines
461 461 return llog
462 462
463 463 def _checkoutlinelog(self):
464 464 """() -> [str]. check out file contents from linelog"""
465 465 contents = []
466 466 for i in range(len(self.contents)):
467 467 rev = (i + 1) * 2
468 468 self.linelog.annotate(rev)
469 469 content = b''.join(map(self._getline, self.linelog.annotateresult))
470 470 contents.append(content)
471 471 return contents
472 472
473 473 def _checkoutlinelogwithedits(self):
474 474 """() -> [str]. prompt all lines for edit"""
475 475 alllines = self.linelog.getalllines()
476 476 # header
477 477 editortext = (
478 478 _(
479 479 b'HG: editing %s\nHG: "y" means the line to the right '
480 480 b'exists in the changeset to the top\nHG:\n'
481 481 )
482 482 % self.fctxs[-1].path()
483 483 )
484 484 # [(idx, fctx)]. hide the dummy emptyfilecontext
485 485 visiblefctxs = [
486 486 (i, f)
487 487 for i, f in enumerate(self.fctxs)
488 488 if not isinstance(f, emptyfilecontext)
489 489 ]
490 490 for i, (j, f) in enumerate(visiblefctxs):
491 491 editortext += _(b'HG: %s/%s %s %s\n') % (
492 492 b'|' * i,
493 493 b'-' * (len(visiblefctxs) - i + 1),
494 494 short(f.node()),
495 495 f.description().split(b'\n', 1)[0],
496 496 )
497 497 editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs))
498 498 # figure out the lifetime of a line; this is relatively inefficient,
499 499 # but probably fine
500 500 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
501 501 for i, f in visiblefctxs:
502 502 self.linelog.annotate((i + 1) * 2)
503 503 for l in self.linelog.annotateresult:
504 504 lineset[l].add(i)
505 505 # append lines
506 506 for l in alllines:
507 507 editortext += b' %s : %s' % (
508 508 b''.join(
509 509 [
510 510 (b'y' if i in lineset[l] else b' ')
511 511 for i, _f in visiblefctxs
512 512 ]
513 513 ),
514 514 self._getline(l),
515 515 )
516 516 # run editor
517 517 editedtext = self.ui.edit(editortext, b'', action=b'absorb')
518 518 if not editedtext:
519 519 raise error.InputError(_(b'empty editor text'))
520 520 # parse edited result
521 521 contents = [b''] * len(self.fctxs)
522 522 leftpadpos = 4
523 523 colonpos = leftpadpos + len(visiblefctxs) + 1
524 524 for l in mdiff.splitnewlines(editedtext):
525 525 if l.startswith(b'HG:'):
526 526 continue
527 527 if l[colonpos - 1 : colonpos + 2] != b' : ':
528 528 raise error.InputError(_(b'malformed line: %s') % l)
529 529 linecontent = l[colonpos + 2 :]
530 530 for i, ch in enumerate(
531 531 pycompat.bytestr(l[leftpadpos : colonpos - 1])
532 532 ):
533 533 if ch == b'y':
534 534 contents[visiblefctxs[i][0]] += linecontent
535 535 # chunkstats is hard to calculate if anything changes, therefore
536 536 # set them to just a simple value (1, 1).
537 537 if editedtext != editortext:
538 538 self.chunkstats = [1, 1]
539 539 return contents
540 540
541 541 def _getline(self, lineinfo):
542 542 """((rev, linenum)) -> str. convert rev+line number to line content"""
543 543 rev, linenum = lineinfo
544 544 if rev & 1: # odd: original line taken from fctxs
545 545 return self.contentlines[rev // 2][linenum]
546 546 else: # even: fixup line from targetfctx
547 547 return self.targetlines[linenum]
548 548
549 549 def _iscontinuous(self, a1, a2, closedinterval=False):
550 550 """(a1, a2 : int) -> bool
551 551
552 552 check if these lines are continuous. i.e. no other insertions or
553 553 deletions (from other revisions) among these lines.
554 554
555 555 closedinterval decides whether a2 should be included or not. i.e. is
556 556 it [a1, a2), or [a1, a2] ?
557 557 """
558 558 if a1 >= a2:
559 559 return True
560 560 llog = self.linelog
561 561 offset1 = llog.getoffset(a1)
562 562 offset2 = llog.getoffset(a2) + int(closedinterval)
563 563 linesinbetween = llog.getalllines(offset1, offset2)
564 564 return len(linesinbetween) == a2 - a1 + int(closedinterval)
565 565
566 566 def _optimizefixups(self, fixups):
567 567 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
568 568 merge adjacent fixups to make them less fragmented.
569 569 """
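# Worked example (hypothetical fixups): two touching fixups for the same
# linelog rev merge into one, provided the lines are continuous:
#     [(4, 0, 1, 0, 1), (4, 1, 3, 1, 2)]  ->  [(4, 0, 3, 0, 2)]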
570 570 result = []
571 571 pcurrentchunk = [[-1, -1, -1, -1, -1]]
572 572
573 573 def pushchunk():
574 574 if pcurrentchunk[0][0] != -1:
575 575 result.append(tuple(pcurrentchunk[0]))
576 576
577 577 for i, chunk in enumerate(fixups):
578 578 rev, a1, a2, b1, b2 = chunk
579 579 lastrev = pcurrentchunk[0][0]
580 580 lasta2 = pcurrentchunk[0][2]
581 581 lastb2 = pcurrentchunk[0][4]
582 582 if (
583 583 a1 == lasta2
584 584 and b1 == lastb2
585 585 and rev == lastrev
586 586 and self._iscontinuous(max(a1 - 1, 0), a1)
587 587 ):
588 588 # merge into currentchunk
589 589 pcurrentchunk[0][2] = a2
590 590 pcurrentchunk[0][4] = b2
591 591 else:
592 592 pushchunk()
593 593 pcurrentchunk[0] = list(chunk)
594 594 pushchunk()
595 595 return result
596 596
597 597 def _showchanges(self, fm, alines, blines, chunk, fixups):
598 598 def trim(line):
599 599 if line.endswith(b'\n'):
600 600 line = line[:-1]
601 601 return line
602 602
603 603 # this is not optimized for perf but _showchanges only gets executed
604 604 # with an extra command-line flag.
605 605 a1, a2, b1, b2 = chunk
606 606 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
607 607 for idx, fa1, fa2, fb1, fb2 in fixups:
608 608 for i in range(fa1, fa2):
609 609 aidxs[i - a1] = (max(idx, 1) - 1) // 2
610 610 for i in range(fb1, fb2):
611 611 bidxs[i - b1] = (max(idx, 1) - 1) // 2
612 612
613 613 fm.startitem()
614 614 fm.write(
615 615 b'hunk',
616 616 b' %s\n',
617 617 b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
618 618 label=b'diff.hunk',
619 619 )
620 620 fm.data(path=self.path, linetype=b'hunk')
621 621
622 622 def writeline(idx, diffchar, line, linetype, linelabel):
623 623 fm.startitem()
624 624 node = b''
625 625 if idx:
626 626 ctx = self.fctxs[idx]
627 627 fm.context(fctx=ctx)
628 628 node = ctx.hex()
629 629 self.ctxaffected.add(ctx.changectx())
630 630 fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node')
631 631 fm.write(
632 632 b'diffchar ' + linetype,
633 633 b'%s%s\n',
634 634 diffchar,
635 635 line,
636 636 label=linelabel,
637 637 )
638 638 fm.data(path=self.path, linetype=linetype)
639 639
640 640 for i in range(a1, a2):
641 641 writeline(
642 642 aidxs[i - a1],
643 643 b'-',
644 644 trim(alines[i]),
645 645 b'deleted',
646 646 b'diff.deleted',
647 647 )
648 648 for i in range(b1, b2):
649 649 writeline(
650 650 bidxs[i - b1],
651 651 b'+',
652 652 trim(blines[i]),
653 653 b'inserted',
654 654 b'diff.inserted',
655 655 )
656 656
657 657
658 658 class fixupstate:
659 659 """state needed to run absorb
660 660
661 661 internally, it keeps paths and filefixupstates.
662 662
663 663 a typical use is like filefixupstates:
664 664
665 665 1. call diffwith, to calculate fixups
666 666 2. (optionally), present fixups to the user, or edit fixups
667 667 3. call apply, to apply changes to memory
668 668 4. call commit, to commit changes to hg database
669 669 """
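# A minimal lifecycle sketch mirroring absorb() below (hypothetical stack
# and targetctx):
#
#     state = fixupstate(stack, ui=ui)
#     state.diffwith(targetctx)    # 1. calculate fixups
#     state.apply()                # 3. apply changes in memory
#     state.commit()               # 4. rewrite the changesets
#     state.printchunkstats()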
670 670
671 def __init__(self, stack, ui=None, opts=None):
671 def __init__(self, stack, ui=None, **opts):
672 672 """([ctx], ui or None) -> None
673 673
674 674 stack: should be linear, and sorted by topo order - oldest first.
675 675 all commits in stack are considered mutable.
676 676 """
677 677 assert stack
678 678 self.ui = ui or nullui()
679 self.opts = opts or {}
679 self.opts = opts
680 680 self.stack = stack
681 681 self.repo = stack[-1].repo().unfiltered()
682 682
683 683 # following fields will be filled later
684 684 self.paths = [] # [str]
685 685 self.status = None # ctx.status output
686 686 self.fctxmap = {} # {path: {ctx: fctx}}
687 687 self.fixupmap = {} # {path: filefixupstate}
688 688 self.replacemap = {} # {oldnode: newnode or None}
689 689 self.finalnode = None # head after all fixups
690 690 self.ctxaffected = set() # ctx that will be absorbed into
691 691
692 692 def diffwith(self, targetctx, match=None, fm=None):
693 693 """diff and prepare fixups. update self.fixupmap, self.paths"""
694 694 # only care about modified files
695 695 self.status = self.stack[-1].status(targetctx, match)
696 696 self.paths = []
697 697 # but if --edit-lines is used, the user may want to edit files
698 698 # even if they are not modified
699 editopt = self.opts.get(b'edit_lines')
699 editopt = self.opts.get('edit_lines')
700 700 if not self.status.modified and editopt and match:
701 701 interestingpaths = match.files()
702 702 else:
703 703 interestingpaths = self.status.modified
704 704 # prepare the filefixupstate
705 705 seenfctxs = set()
706 706 # sorting is necessary to eliminate ambiguity for the "double move"
707 707 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
708 708 for path in sorted(interestingpaths):
709 709 self.ui.debug(b'calculating fixups for %s\n' % path)
710 710 targetfctx = targetctx[path]
711 711 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
712 712 # ignore symbolic links or binary, or unchanged files
713 713 if any(
714 714 f.islink() or stringutil.binary(f.data())
715 715 for f in [targetfctx] + fctxs
716 716 if not isinstance(f, emptyfilecontext)
717 717 ):
718 718 continue
719 719 if targetfctx.data() == fctxs[-1].data() and not editopt:
720 720 continue
721 721 seenfctxs.update(fctxs[1:])
722 722 self.fctxmap[path] = ctx2fctx
723 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
723 fstate = filefixupstate(fctxs, path, ui=self.ui, **self.opts)
724 724 if fm is not None:
725 725 fm.startitem()
726 726 fm.plain(b'showing changes for ')
727 727 fm.write(b'path', b'%s\n', path, label=b'absorb.path')
728 728 fm.data(linetype=b'path')
729 729 fstate.diffwith(targetfctx, fm)
730 730 self.fixupmap[path] = fstate
731 731 self.paths.append(path)
732 732 self.ctxaffected.update(fstate.ctxaffected)
733 733
734 734 def apply(self):
735 735 """apply fixups to individual filefixupstates"""
736 736 for path, state in self.fixupmap.items():
737 737 if self.ui.debugflag:
738 738 self.ui.write(_(b'applying fixups to %s\n') % path)
739 739 state.apply()
740 740
741 741 @property
742 742 def chunkstats(self):
743 743 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
744 744 return {path: state.chunkstats for path, state in self.fixupmap.items()}
745 745
746 746 def commit(self):
747 747 """commit changes. update self.finalnode, self.replacemap"""
748 748 with self.repo.transaction(b'absorb') as tr:
749 749 self._commitstack()
750 750 self._movebookmarks(tr)
751 751 if self.repo[b'.'].node() in self.replacemap:
752 752 self._moveworkingdirectoryparent()
753 753 self._cleanupoldcommits()
754 754 return self.finalnode
755 755
756 756 def printchunkstats(self):
757 757 """print things like '1 of 2 chunk(s) applied'"""
758 758 ui = self.ui
759 759 chunkstats = self.chunkstats
760 760 if ui.verbose:
761 761 # chunkstats for each file
762 762 for path, stat in chunkstats.items():
763 763 if stat[0]:
764 764 ui.write(
765 765 _(b'%s: %d of %d chunk(s) applied\n')
766 766 % (path, stat[0], stat[1])
767 767 )
768 768 elif not ui.quiet:
769 769 # a summary for all files
770 770 stats = chunkstats.values()
771 771 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
772 772 ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total))
773 773
774 774 def _commitstack(self):
775 775 """make new commits. update self.finalnode, self.replacemap.
776 776 it is split from "commit" to avoid too much indentation.
777 777 """
778 778 # last node (20-char) committed by us
779 779 lastcommitted = None
780 780 # p1 which overrides the parent of the next commit, "None" means use
781 781 # the original parent unchanged
782 782 nextp1 = None
783 783 for ctx in self.stack:
784 784 memworkingcopy = self._getnewfilecontents(ctx)
785 785 if not memworkingcopy and not lastcommitted:
786 786 # nothing changed, nothing committed
787 787 nextp1 = ctx
788 788 continue
789 789 willbecomenoop = ctx.files() and self._willbecomenoop(
790 790 memworkingcopy, ctx, nextp1
791 791 )
792 792 if self.skip_empty_successor and willbecomenoop:
793 793 # changeset is no longer necessary
794 794 self.replacemap[ctx.node()] = None
795 795 msg = _(b'became empty and was dropped')
796 796 else:
797 797 # changeset needs re-commit
798 798 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
799 799 lastcommitted = self.repo[nodestr]
800 800 nextp1 = lastcommitted
801 801 self.replacemap[ctx.node()] = lastcommitted.node()
802 802 if memworkingcopy:
803 803 if willbecomenoop:
804 804 msg = _(b'%d file(s) changed, became empty as %s')
805 805 else:
806 806 msg = _(b'%d file(s) changed, became %s')
807 807 msg = msg % (
808 808 len(memworkingcopy),
809 809 self._ctx2str(lastcommitted),
810 810 )
811 811 else:
812 812 msg = _(b'became %s') % self._ctx2str(lastcommitted)
813 813 if self.ui.verbose and msg:
814 814 self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg))
815 815 self.finalnode = lastcommitted and lastcommitted.node()
816 816
817 817 def _ctx2str(self, ctx):
818 818 if self.ui.debugflag:
819 819 return b'%d:%s' % (ctx.rev(), ctx.hex())
820 820 else:
821 821 return b'%d:%s' % (ctx.rev(), short(ctx.node()))
822 822
823 823 def _getnewfilecontents(self, ctx):
824 824 """(ctx) -> {path: str}
825 825
826 826 fetch file contents from filefixupstates.
827 827 return the working copy overrides - files different from ctx.
828 828 """
829 829 result = {}
830 830 for path in self.paths:
831 831 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
832 832 if ctx not in ctx2fctx:
833 833 continue
834 834 fctx = ctx2fctx[ctx]
835 835 content = fctx.data()
836 836 newcontent = self.fixupmap[path].getfinalcontent(fctx)
837 837 if content != newcontent:
838 838 result[fctx.path()] = newcontent
839 839 return result
840 840
841 841 def _movebookmarks(self, tr):
842 842 repo = self.repo
843 843 needupdate = [
844 844 (name, self.replacemap[hsh])
845 845 for name, hsh in repo._bookmarks.items()
846 846 if hsh in self.replacemap
847 847 ]
848 848 changes = []
849 849 for name, hsh in needupdate:
850 850 if hsh:
851 851 changes.append((name, hsh))
852 852 if self.ui.verbose:
853 853 self.ui.write(
854 854 _(b'moving bookmark %s to %s\n') % (name, hex(hsh))
855 855 )
856 856 else:
857 857 changes.append((name, None))
858 858 if self.ui.verbose:
859 859 self.ui.write(_(b'deleting bookmark %s\n') % name)
860 860 repo._bookmarks.applychanges(repo, tr, changes)
861 861
862 862 def _moveworkingdirectoryparent(self):
863 863 if not self.finalnode:
864 864 # Find the latest not-{obsoleted,stripped} parent.
865 865 revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys())
866 866 ctx = self.repo[revs.first()]
867 867 self.finalnode = ctx.node()
868 868 else:
869 869 ctx = self.repo[self.finalnode]
870 870
871 871 dirstate = self.repo.dirstate
872 872 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
873 873 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
874 874 noop = lambda: 0
875 875 restore = noop
876 876 if util.safehasattr(dirstate, '_fsmonitorstate'):
877 877 bak = dirstate._fsmonitorstate.invalidate
878 878
879 879 def restore():
880 880 dirstate._fsmonitorstate.invalidate = bak
881 881
882 882 dirstate._fsmonitorstate.invalidate = noop
883 883 try:
884 884 with dirstate.changing_parents(self.repo):
885 885 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
886 886 finally:
887 887 restore()
888 888
889 889 @staticmethod
890 890 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
891 891 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
892 892
893 893 if it will become an empty commit (does not change anything, after the
894 894 memworkingcopy overrides), return True. otherwise return False.
895 895 """
896 896 if not pctx:
897 897 parents = ctx.parents()
898 898 if len(parents) != 1:
899 899 return False
900 900 pctx = parents[0]
901 901 if ctx.branch() != pctx.branch():
902 902 return False
903 903 if ctx.extra().get(b'close'):
904 904 return False
905 905 # ctx changes more files (not a subset of memworkingcopy)
906 906 if not set(ctx.files()).issubset(set(memworkingcopy)):
907 907 return False
908 908 for path, content in memworkingcopy.items():
909 909 if path not in pctx or path not in ctx:
910 910 return False
911 911 fctx = ctx[path]
912 912 pfctx = pctx[path]
913 913 if pfctx.flags() != fctx.flags():
914 914 return False
915 915 if pfctx.data() != content:
916 916 return False
917 917 return True
918 918
919 919 def _commitsingle(self, memworkingcopy, ctx, p1=None):
920 920 """({path: content}, ctx, node) -> node. make a single commit
921 921
922 922 the commit is a clone of ctx, with an (optionally) different p1, and
923 923 different file contents replaced by memworkingcopy.
924 924 """
925 925 parents = p1 and (p1, self.repo.nullid)
926 926 extra = ctx.extra()
927 927 if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
928 928 extra[b'absorb_source'] = ctx.hex()
929 929
930 930 desc = rewriteutil.update_hash_refs(
931 931 ctx.repo(),
932 932 ctx.description(),
933 933 {
934 934 oldnode: [newnode]
935 935 for oldnode, newnode in self.replacemap.items()
936 936 },
937 937 )
938 938 mctx = overlaycontext(
939 939 memworkingcopy, ctx, parents, extra=extra, desc=desc
940 940 )
941 941 return mctx.commit()
942 942
943 943 @util.propertycache
944 944 def _useobsolete(self):
945 945 """() -> bool"""
946 946 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
947 947
948 948 def _cleanupoldcommits(self):
949 949 replacements = {
950 950 k: ([v] if v is not None else [])
951 951 for k, v in self.replacemap.items()
952 952 }
953 953 if replacements:
954 954 scmutil.cleanupnodes(
955 955 self.repo, replacements, operation=b'absorb', fixphase=True
956 956 )
957 957
958 958 @util.propertycache
959 959 def skip_empty_successor(self):
960 960 return rewriteutil.skip_empty_successor(self.ui, b'absorb')
961 961
962 962
963 963 def _parsechunk(hunk):
964 964 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
965 965 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
966 966 return None, None
967 967 path = hunk.header.filename()
968 968 a1 = hunk.fromline + len(hunk.before) - 1
969 969 # remove before and after context
970 970 hunk.before = hunk.after = []
971 971 buf = util.stringio()
972 972 hunk.write(buf)
973 973 patchlines = mdiff.splitnewlines(buf.getvalue())
974 974 # hunk.prettystr() will update hunk.removed
975 975 a2 = a1 + hunk.removed
976 976 blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')]
977 977 return path, (a1, a2, blines)
978 978
979 979
980 980 def overlaydiffcontext(ctx, chunks):
981 981 """(ctx, [crecord.uihunk]) -> memctx
982 982
983 983 return a memctx with some [1] patches (chunks) applied to ctx.
984 984 [1]: modifications are handled. renames, mode changes, etc. are ignored.
985 985 """
986 986 # sadly the applying-patch logic is hardly reusable, and messy:
987 987 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
988 988 # needs a file stream of a patch and will re-parse it, while we have
989 989 # structured hunk objects at hand.
990 990 # 2. a lot of different implementations of "chunk" (patch.hunk,
991 991 # patch.recordhunk, crecord.uihunk)
992 992 # as we only care about applying changes to modified files, no mode
993 993 # change, no binary diff, and no renames, it's probably okay to
994 994 # re-invent the logic using much simpler code here.
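# Worked example (hypothetical hunk): each parsed (a1, a2, [bline]) entry
# simply splices the new lines over the old range, e.g.
#     lines = [b'1\n', b'2\n', b'3\n']; lines[1:2] = [b'x\n', b'y\n']
#     gives [b'1\n', b'x\n', b'y\n', b'3\n']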
995 995 memworkingcopy = {} # {path: content}
996 996 patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
997 997 for path, info in map(_parsechunk, chunks):
998 998 if not path or not info:
999 999 continue
1000 1000 patchmap[path].append(info)
1001 1001 for path, patches in patchmap.items():
1002 1002 if path not in ctx or not patches:
1003 1003 continue
1004 1004 patches.sort(reverse=True)
1005 1005 lines = mdiff.splitnewlines(ctx[path].data())
1006 1006 for a1, a2, blines in patches:
1007 1007 lines[a1:a2] = blines
1008 1008 memworkingcopy[path] = b''.join(lines)
1009 1009 return overlaycontext(memworkingcopy, ctx)
1010 1010
1011 1011
1012 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
1012 def absorb(ui, repo, stack=None, targetctx=None, pats=None, **opts):
1013 1013 """pick fixup chunks from targetctx, apply them to stack.
1014 1014
1015 1015 if targetctx is None, the working copy context will be used.
1016 1016 if stack is None, the current draft stack will be used.
1017 1017 return fixupstate.
1018 1018 """
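# Example call with native-string kwargs (the point of this change);
# hypothetical values, mirroring how absorbcmd() forwards **opts below:
#
#     state = absorb(ui, repo, pats=(), apply_changes=True, dry_run=True)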
1019 1019 if stack is None:
1020 1020 limit = ui.configint(b'absorb', b'max-stack-size')
1021 1021 headctx = repo[b'.']
1022 1022 if len(headctx.parents()) > 1:
1023 1023 raise error.InputError(_(b'cannot absorb into a merge'))
1024 1024 stack = getdraftstack(headctx, limit)
1025 1025 if limit and len(stack) >= limit:
1026 1026 ui.warn(
1027 1027 _(
1028 1028 b'absorb: only the recent %d changesets will '
1029 1029 b'be analysed\n'
1030 1030 )
1031 1031 % limit
1032 1032 )
1033 1033 if not stack:
1034 1034 raise error.InputError(_(b'no mutable changeset to change'))
1035 1035 if targetctx is None: # default to working copy
1036 1036 targetctx = repo[None]
1037 1037 if pats is None:
1038 1038 pats = ()
1039 if opts is None:
1040 opts = {}
1041 state = fixupstate(stack, ui=ui, opts=opts)
1042 matcher = scmutil.match(targetctx, pats, opts)
1043 if opts.get(b'interactive'):
1039
1040 state = fixupstate(stack, ui=ui, **opts)
1041 matcher = scmutil.match(targetctx, pats, pycompat.byteskwargs(opts))
1042 if opts.get('interactive'):
1044 1043 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
1045 1044 origchunks = patch.parsepatch(diff)
1046 1045 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
1047 1046 targetctx = overlaydiffcontext(stack[-1], chunks)
1048 if opts.get(b'edit_lines'):
1047 if opts.get('edit_lines'):
1049 1048 # If we're going to open the editor, don't ask the user to confirm
1050 1049 # first
1051 opts[b'apply_changes'] = True
1050 opts['apply_changes'] = True
1052 1051 fm = None
1053 if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
1054 fm = ui.formatter(b'absorb', opts)
1052 if opts.get('print_changes') or not opts.get('apply_changes'):
1053 fm = ui.formatter(b'absorb', pycompat.byteskwargs(opts))
1055 1054 state.diffwith(targetctx, matcher, fm)
1056 1055 if fm is not None:
1057 1056 fm.startitem()
1058 1057 fm.write(
1059 1058 b"count", b"\n%d changesets affected\n", len(state.ctxaffected)
1060 1059 )
1061 1060 fm.data(linetype=b'summary')
1062 1061 for ctx in reversed(stack):
1063 1062 if ctx not in state.ctxaffected:
1064 1063 continue
1065 1064 fm.startitem()
1066 1065 fm.context(ctx=ctx)
1067 1066 fm.data(linetype=b'changeset')
1068 1067 fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
1069 1068 descfirstline = stringutil.firstline(ctx.description())
1070 1069 fm.write(
1071 1070 b'descfirstline',
1072 1071 b'%s\n',
1073 1072 descfirstline,
1074 1073 label=b'absorb.description',
1075 1074 )
1076 1075 fm.end()
1077 if not opts.get(b'dry_run'):
1076 if not opts.get('dry_run'):
1078 1077 if (
1079 not opts.get(b'apply_changes')
1078 not opts.get('apply_changes')
1080 1079 and state.ctxaffected
1081 1080 and ui.promptchoice(
1082 1081 b"apply changes (y/N)? $$ &Yes $$ &No", default=1
1083 1082 )
1084 1083 ):
1085 1084 raise error.CanceledError(_(b'absorb cancelled\n'))
1086 1085
1087 1086 state.apply()
1088 1087 if state.commit():
1089 1088 state.printchunkstats()
1090 1089 elif not ui.quiet:
1091 1090 ui.write(_(b'nothing applied\n'))
1092 1091 return state
1093 1092
1094 1093
1095 1094 @command(
1096 1095 b'absorb',
1097 1096 [
1098 1097 (
1099 1098 b'a',
1100 1099 b'apply-changes',
1101 1100 None,
1102 1101 _(b'apply changes without prompting for confirmation'),
1103 1102 ),
1104 1103 (
1105 1104 b'p',
1106 1105 b'print-changes',
1107 1106 None,
1108 1107 _(b'always print which changesets are modified by which changes'),
1109 1108 ),
1110 1109 (
1111 1110 b'i',
1112 1111 b'interactive',
1113 1112 None,
1114 1113 _(b'interactively select which chunks to apply'),
1115 1114 ),
1116 1115 (
1117 1116 b'e',
1118 1117 b'edit-lines',
1119 1118 None,
1120 1119 _(
1121 1120 b'edit what lines belong to which changesets before commit '
1122 1121 b'(EXPERIMENTAL)'
1123 1122 ),
1124 1123 ),
1125 1124 ]
1126 1125 + commands.dryrunopts
1127 1126 + commands.templateopts
1128 1127 + commands.walkopts,
1129 1128 _(b'hg absorb [OPTION] [FILE]...'),
1130 1129 helpcategory=command.CATEGORY_COMMITTING,
1131 1130 helpbasic=True,
1132 1131 )
1133 1132 def absorbcmd(ui, repo, *pats, **opts):
1134 1133 """incorporate corrections into the stack of draft changesets
1135 1134
1136 1135 absorb analyzes each change in your working directory and attempts to
1137 1136 amend the changed lines into the changesets in your stack that first
1138 1137 introduced those lines.
1139 1138
1140 1139 If absorb cannot find an unambiguous changeset to amend for a change,
1141 1140 that change will be left in the working directory, untouched. Such changes
1142 1141 can be observed by :hg:`status` or :hg:`diff` afterwards. In other words,
1143 1142 absorb does not write to the working directory.
1144 1143
1145 1144 Changesets outside the revset `::. and not public() and not merge()` will
1146 1145 not be changed.
1147 1146
1148 1147 Changesets that become empty after applying the changes will be deleted.
1149 1148
1150 1149 By default, absorb will show what it plans to do and prompt for
1151 1150 confirmation. If you are confident that the changes will be absorbed
1152 1151 to the correct place, run :hg:`absorb -a` to apply the changes
1153 1152 immediately.
1154 1153
1155 1154 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1156 1155 """
1157 opts = pycompat.byteskwargs(opts)
1158
1159 1156 with repo.wlock(), repo.lock():
1160 if not opts[b'dry_run']:
1157 if not opts['dry_run']:
1161 1158 cmdutil.checkunfinished(repo)
1162 1159
1163 state = absorb(ui, repo, pats=pats, opts=opts)
1160 state = absorb(ui, repo, pats=pats, **opts)
1164 1161 if sum(s[0] for s in state.chunkstats.values()) == 0:
1165 1162 return 1