absorb: make `--edit-lines` imply `--apply-changes`...
Martin von Zweigbergk
r49963:3cd57e2b default
# absorb.py
#
# Copyright 2016 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""apply working directory changes to changesets (EXPERIMENTAL)

The absorb extension provides a command to use annotate information to
amend modified chunks into the corresponding non-public changesets.

::

    [absorb]
    # only check 50 recent non-public changesets at most
    max-stack-size = 50
    # whether to add noise to new commits to avoid obsolescence cycle
    add-noise = 1
    # make `amend --correlated` a shortcut to the main command
    amend-flag = correlated

    [color]
    absorb.description = yellow
    absorb.node = blue bold
    absorb.path = bold
"""

# TODO:
# * Rename config items to [commands] namespace
# * Converge getdraftstack() with other code in core
# * move many attributes on fixupstate to be private


import collections

from mercurial.i18n import _
from mercurial.node import (
    hex,
    short,
)
from mercurial import (
    cmdutil,
    commands,
    context,
    crecord,
    error,
    linelog,
    mdiff,
    obsolete,
    patch,
    phases,
    pycompat,
    registrar,
    rewriteutil,
    scmutil,
    util,
)
from mercurial.utils import stringutil

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

configitem(b'absorb', b'add-noise', default=True)
configitem(b'absorb', b'amend-flag', default=None)
configitem(b'absorb', b'max-stack-size', default=50)

colortable = {
    b'absorb.description': b'yellow',
    b'absorb.node': b'blue bold',
    b'absorb.path': b'bold',
}

defaultdict = collections.defaultdict


class nullui:
    """blank ui object doing nothing"""

    debugflag = False
    verbose = False
    quiet = True

    def __getitem__(name):
        def nullfunc(*args, **kwds):
            return

        return nullfunc


class emptyfilecontext:
    """minimal filecontext representing an empty file"""

    def __init__(self, repo):
        self._repo = repo

    def data(self):
        return b''

    def node(self):
        return self._repo.nullid


def uniq(lst):
    """list -> list. remove duplicated items without changing the order"""
    seen = set()
    result = []
    for x in lst:
        if x not in seen:
            seen.add(x)
            result.append(x)
    return result
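
# A quick illustration of the helper above:
#   uniq([3, 1, 3, 2, 1]) -> [3, 1, 2]  (first occurrences win, order preserved)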


def getdraftstack(headctx, limit=None):
    """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.

    changesets are sorted in topo order, oldest first.
    return at most limit items, if limit is a positive number.

    merges are considered as non-draft as well. i.e. every commit
    returned has and only has 1 parent.
    """
    ctx = headctx
    result = []
    while ctx.phase() != phases.public:
        if limit and len(result) >= limit:
            break
        parents = ctx.parents()
        if len(parents) != 1:
            break
        result.append(ctx)
        ctx = parents[0]
    result.reverse()
    return result
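
# For instance, with a public changeset P and draft descendants D1 -> D2 -> D3
# (D3 being the newest head), getdraftstack(repo[D3]) returns [D1, D2, D3],
# while getdraftstack(repo[D3], limit=2) returns [D2, D3].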


def getfilestack(stack, path, seenfctxs=None):
    """([ctx], str, set) -> [fctx], {ctx: fctx}

    stack is a list of contexts, from old to new. usually they are what
    "getdraftstack" returns.

    follows renames, but not copies.

    seenfctxs is a set of filecontexts that will be considered "immutable".
    they are usually what this function returned in earlier calls, useful
    to avoid issues that a file was "moved" to multiple places and was then
    modified differently, like: "a" was copied to "b", "a" was also copied to
    "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
    and we enforce only one of them to be able to affect "a"'s content.

    return an empty list and an empty dict, if the specified path does not
    exist in stack[-1] (the top of the stack).

    otherwise, return a list of de-duplicated filecontexts, and the map to
    convert ctx in the stack to fctx, for possible mutable fctxs. the first item
    of the list would be outside the stack and should be considered immutable.
    the remaining items are within the stack.

    for example, given the following changelog and corresponding filelog
    revisions:

      changelog: 3----4----5----6----7
      filelog:   x    0----1----1----2 (x: no such file yet)

    - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
    - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
      dummy empty filecontext.
    - if stack = [2], returns ([], {})
    - if stack = [7], returns ([1, 2], {7: 2})
    - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
      removed, since 1 is immutable.
    """
    if seenfctxs is None:
        seenfctxs = set()
    assert stack

    if path not in stack[-1]:
        return [], {}

    fctxs = []
    fctxmap = {}

    pctx = stack[0].p1()  # the public (immutable) ctx we stop at
    for ctx in reversed(stack):
        if path not in ctx:  # the file is added in the next commit
            pctx = ctx
            break
        fctx = ctx[path]
        fctxs.append(fctx)
        if fctx in seenfctxs:  # treat fctx as the immutable one
            pctx = None  # do not add another immutable fctx
            break
        fctxmap[ctx] = fctx  # only for mutable fctxs
        copy = fctx.copysource()
        if copy:
            path = copy  # follow rename
            if path in ctx:  # but do not follow copy
                pctx = ctx.p1()
                break

    if pctx is not None:  # need an extra immutable fctx
        if path in pctx:
            fctxs.append(pctx[path])
        else:
            fctxs.append(emptyfilecontext(pctx.repo()))

    fctxs.reverse()
    # note: we rely on a property of hg: filerev is not reused for linear
    # history. i.e. it's impossible to have:
    # changelog: 4----5----6 (linear, no merges)
    # filelog:   1----2----1
    #                      ^ reuse filerev (impossible)
    # because parents are part of the hash. if that's not true, we need to
    # remove uniq and find a different way to identify fctxs.
    return uniq(fctxs), fctxmap


class overlaystore(patch.filestore):
    """read-only, hybrid store based on a dict and ctx.
    memworkingcopy: {path: content}, overrides file contents.
    """

    def __init__(self, basectx, memworkingcopy):
        self.basectx = basectx
        self.memworkingcopy = memworkingcopy

    def getfile(self, path):
        """comply with mercurial.patch.filestore.getfile"""
        if path not in self.basectx:
            return None, None, None
        fctx = self.basectx[path]
        if path in self.memworkingcopy:
            content = self.memworkingcopy[path]
        else:
            content = fctx.data()
        mode = (fctx.islink(), fctx.isexec())
        copy = fctx.copysource()
        return content, mode, copy


def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
    memworkingcopy overrides file contents.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    if desc is None:
        desc = ctx.description()
    date = ctx.date()
    user = ctx.user()
    files = set(ctx.files()).union(memworkingcopy)
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(),
        parents=parents,
        text=desc,
        files=files,
        filectxfn=store,
        user=user,
        date=date,
        branch=None,
        extra=extra,
    )
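
# A hypothetical usage sketch of overlaycontext(): rewrite one file of the
# working-copy parent entirely in memory and commit the result as a new
# changeset (the path and content below are made up):
#
#   mctx = overlaycontext({b'a.txt': b'new content\n'}, repo[b'.'])
#   newnode = mctx.commit()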


class filefixupstate:
    """state needed to apply fixups to a single file

    internally, it keeps file contents of several revisions and a linelog.

    the linelog uses odd revision numbers for original contents (fctxs passed
    to __init__), and even revision numbers for fixups, like:

        linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
        linelog rev 2: fixups made to self.fctxs[0]
        linelog rev 3: self.fctxs[1] (a child of fctxs[0])
        linelog rev 4: fixups made to self.fctxs[1]
        ...

    a typical use is like:

        1. call diffwith, to calculate self.fixups
        2. (optionally), present self.fixups to the user, or change it
        3. call apply, to apply changes
        4. read results from "finalcontents", or call getfinalcontent
    """

    def __init__(self, fctxs, path, ui=None, opts=None):
        """([fctx], ui or None) -> None

        fctxs should be linear, and sorted by topo order - oldest first.
        fctxs[0] will be considered as "immutable" and will not be changed.
        """
        self.fctxs = fctxs
        self.path = path
        self.ui = ui or nullui()
        self.opts = opts or {}

        # following fields are built from fctxs. they exist for perf reason
        self.contents = [f.data() for f in fctxs]
        self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
        self.linelog = self._buildlinelog()
        if self.ui.debugflag:
            assert self._checkoutlinelog() == self.contents

        # following fields will be filled later
        self.chunkstats = [0, 0]  # [adopted, total : int]
        self.targetlines = []  # [str]
        self.fixups = []  # [(linelog rev, a1, a2, b1, b2)]
        self.finalcontents = []  # [str]
        self.ctxaffected = set()

    def diffwith(self, targetfctx, fm=None):
        """calculate fixups needed by examining the differences between
        self.fctxs[-1] and targetfctx, chunk by chunk.

        targetfctx is the target state we move towards. we may or may not be
        able to get there because not all modified chunks can be amended into
        a non-public fctx unambiguously.

        call this only once, before apply().

        update self.fixups, self.chunkstats, and self.targetlines.
        """
        a = self.contents[-1]
        alines = self.contentlines[-1]
        b = targetfctx.data()
        blines = mdiff.splitnewlines(b)
        self.targetlines = blines

        self.linelog.annotate(self.linelog.maxrev)
        annotated = self.linelog.annotateresult  # [(linelog rev, linenum)]
        assert len(annotated) == len(alines)
        # add a dummy end line to make insertion at the end easier
        if annotated:
            dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
            annotated.append(dummyendline)

        # analyse diff blocks
        for chunk in self._alldiffchunks(a, b, alines, blines):
            newfixups = self._analysediffchunk(chunk, annotated)
            self.chunkstats[0] += bool(newfixups)  # 1 or 0
            self.chunkstats[1] += 1
            self.fixups += newfixups
            if fm is not None:
                self._showchanges(fm, alines, blines, chunk, newfixups)

    def apply(self):
        """apply self.fixups. update self.linelog, self.finalcontents.

        call this only once, before getfinalcontent(), after diffwith().
        """
        # the following is unnecessary, as it's done by "diffwith":
        # self.linelog.annotate(self.linelog.maxrev)
        for rev, a1, a2, b1, b2 in reversed(self.fixups):
            blines = self.targetlines[b1:b2]
            if self.ui.debugflag:
                idx = (max(rev - 1, 0)) // 2
                self.ui.write(
                    _(b'%s: chunk %d:%d -> %d lines\n')
                    % (short(self.fctxs[idx].node()), a1, a2, len(blines))
                )
            self.linelog.replacelines(rev, a1, a2, b1, b2)
        if self.opts.get(b'edit_lines', False):
            self.finalcontents = self._checkoutlinelogwithedits()
        else:
            self.finalcontents = self._checkoutlinelog()

    def getfinalcontent(self, fctx):
        """(fctx) -> str. get modified file content for a given filecontext"""
        idx = self.fctxs.index(fctx)
        return self.finalcontents[idx]

    def _analysediffchunk(self, chunk, annotated):
        """analyse a diff chunk and return new fixups found

        return [] if no lines from the chunk can be safely applied.

        the chunk (or lines) cannot be safely applied, if, for example:
        - the modified (deleted) lines belong to a public changeset
          (self.fctxs[0])
        - the chunk is a pure insertion and the adjacent lines (at most 2
          lines) belong to different non-public changesets, or do not belong
          to any non-public changesets.
        - the chunk is modifying lines from different changesets.
          in this case, if the number of lines deleted equals the number
          of lines added, assume it's a simple 1:1 map (could be wrong).
          otherwise, give up.
        - the chunk is modifying lines from a single non-public changeset,
          but other revisions touch the area as well. i.e. the lines are
          not continuous as seen from the linelog.
        """
        a1, a2, b1, b2 = chunk
        # find involved indexes from annotate result
        involved = annotated[a1:a2]
        if not involved and annotated:  # a1 == a2 and a is not empty
            # pure insertion, check nearby lines. ignore lines belonging
            # to the public (first) changeset (i.e. annotated[i][0] == 1)
            nearbylinenums = {a2, max(0, a1 - 1)}
            involved = [
                annotated[i] for i in nearbylinenums if annotated[i][0] != 1
            ]
        involvedrevs = list({r for r, l in involved})
        newfixups = []
        if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
            rev = involvedrevs[0]
            if rev > 1:
                fixuprev = rev + 1
                newfixups.append((fixuprev, a1, a2, b1, b2))
        elif a2 - a1 == b2 - b1 or b1 == b2:
            # 1:1 line mapping, or chunk was deleted
            for i in pycompat.xrange(a1, a2):
                rev, linenum = annotated[i]
                if rev > 1:
                    if b1 == b2:  # deletion, simply remove that single line
                        nb1 = nb2 = 0
                    else:  # 1:1 line mapping, change the corresponding rev
                        nb1 = b1 + i - a1
                        nb2 = nb1 + 1
                    fixuprev = rev + 1
                    newfixups.append((fixuprev, i, i + 1, nb1, nb2))
        return self._optimizefixups(newfixups)

    @staticmethod
    def _alldiffchunks(a, b, alines, blines):
        """like mdiff.allblocks, but only care about differences"""
        blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
        for chunk, btype in blocks:
            if btype != b'!':
                continue
            yield chunk

    def _buildlinelog(self):
        """calculate the initial linelog based on self.content{,line}s.
        this is similar to running a partial "annotate".
        """
        llog = linelog.linelog()
        a, alines = b'', []
        for i in pycompat.xrange(len(self.contents)):
            b, blines = self.contents[i], self.contentlines[i]
            llrev = i * 2 + 1
            chunks = self._alldiffchunks(a, b, alines, blines)
            for a1, a2, b1, b2 in reversed(list(chunks)):
                llog.replacelines(llrev, a1, a2, b1, b2)
            a, alines = b, blines
        return llog

    def _checkoutlinelog(self):
        """() -> [str]. check out file contents from linelog"""
        contents = []
        for i in pycompat.xrange(len(self.contents)):
            rev = (i + 1) * 2
            self.linelog.annotate(rev)
            content = b''.join(map(self._getline, self.linelog.annotateresult))
            contents.append(content)
        return contents

    def _checkoutlinelogwithedits(self):
        """() -> [str]. prompt all lines for edit"""
        alllines = self.linelog.getalllines()
        # header
        editortext = (
            _(
                b'HG: editing %s\nHG: "y" means the line to the right '
                b'exists in the changeset to the top\nHG:\n'
            )
            % self.fctxs[-1].path()
        )
        # [(idx, fctx)]. hide the dummy emptyfilecontext
        visiblefctxs = [
            (i, f)
            for i, f in enumerate(self.fctxs)
            if not isinstance(f, emptyfilecontext)
        ]
        for i, (j, f) in enumerate(visiblefctxs):
            editortext += _(b'HG: %s/%s %s %s\n') % (
                b'|' * i,
                b'-' * (len(visiblefctxs) - i + 1),
                short(f.node()),
                f.description().split(b'\n', 1)[0],
            )
        editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs))
        # figure out the lifetime of a line, this is relatively inefficient,
        # but probably fine
        lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
        for i, f in visiblefctxs:
            self.linelog.annotate((i + 1) * 2)
            for l in self.linelog.annotateresult:
                lineset[l].add(i)
        # append lines
        for l in alllines:
            editortext += b'    %s : %s' % (
                b''.join(
                    [
                        (b'y' if i in lineset[l] else b' ')
                        for i, _f in visiblefctxs
                    ]
                ),
                self._getline(l),
            )
        # run editor
        editedtext = self.ui.edit(editortext, b'', action=b'absorb')
        if not editedtext:
            raise error.InputError(_(b'empty editor text'))
        # parse edited result
        contents = [b''] * len(self.fctxs)
        leftpadpos = 4
        colonpos = leftpadpos + len(visiblefctxs) + 1
        for l in mdiff.splitnewlines(editedtext):
            if l.startswith(b'HG:'):
                continue
            if l[colonpos - 1 : colonpos + 2] != b' : ':
                raise error.InputError(_(b'malformed line: %s') % l)
            linecontent = l[colonpos + 2 :]
            for i, ch in enumerate(
                pycompat.bytestr(l[leftpadpos : colonpos - 1])
            ):
                if ch == b'y':
                    contents[visiblefctxs[i][0]] += linecontent
        # chunkstats is hard to calculate if anything changes, therefore
        # set them to just a simple value (1, 1).
        if editedtext != editortext:
            self.chunkstats = [1, 1]
        return contents

    def _getline(self, lineinfo):
        """((rev, linenum)) -> str. convert rev+line number to line content"""
        rev, linenum = lineinfo
        if rev & 1:  # odd: original line taken from fctxs
            return self.contentlines[rev // 2][linenum]
        else:  # even: fixup line from targetfctx
            return self.targetlines[linenum]
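
    # e.g. lineinfo == (3, 5) refers to line 5 of self.contentlines[1] (an
    # original line of fctxs[1]), while (4, 5) refers to self.targetlines[5]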

    def _iscontinuous(self, a1, a2, closedinterval=False):
        """(a1, a2 : int) -> bool

        check if these lines are continuous. i.e. no other insertions or
        deletions (from other revisions) among these lines.

        closedinterval decides whether a2 should be included or not. i.e. is
        it [a1, a2), or [a1, a2] ?
        """
        if a1 >= a2:
            return True
        llog = self.linelog
        offset1 = llog.getoffset(a1)
        offset2 = llog.getoffset(a2) + int(closedinterval)
        linesinbetween = llog.getalllines(offset1, offset2)
        return len(linesinbetween) == a2 - a1 + int(closedinterval)

    def _optimizefixups(self, fixups):
        """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
        merge adjacent fixups to make them less fragmented.
        """
        result = []
        pcurrentchunk = [[-1, -1, -1, -1, -1]]

        def pushchunk():
            if pcurrentchunk[0][0] != -1:
                result.append(tuple(pcurrentchunk[0]))

        for i, chunk in enumerate(fixups):
            rev, a1, a2, b1, b2 = chunk
            lastrev = pcurrentchunk[0][0]
            lasta2 = pcurrentchunk[0][2]
            lastb2 = pcurrentchunk[0][4]
            if (
                a1 == lasta2
                and b1 == lastb2
                and rev == lastrev
                and self._iscontinuous(max(a1 - 1, 0), a1)
            ):
                # merge into currentchunk
                pcurrentchunk[0][2] = a2
                pcurrentchunk[0][4] = b2
            else:
                pushchunk()
                pcurrentchunk[0] = list(chunk)
        pushchunk()
        return result
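
    # e.g. two adjacent fixups targeting the same linelog rev, such as
    # (4, 0, 2, 0, 2) and (4, 2, 5, 2, 5), are merged into (4, 0, 5, 0, 5),
    # provided the lines in between are continuous in the linelog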

    def _showchanges(self, fm, alines, blines, chunk, fixups):
        def trim(line):
            if line.endswith(b'\n'):
                line = line[:-1]
            return line

        # this is not optimized for perf but _showchanges only gets executed
        # with an extra command-line flag.
        a1, a2, b1, b2 = chunk
        aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
        for idx, fa1, fa2, fb1, fb2 in fixups:
            for i in pycompat.xrange(fa1, fa2):
                aidxs[i - a1] = (max(idx, 1) - 1) // 2
            for i in pycompat.xrange(fb1, fb2):
                bidxs[i - b1] = (max(idx, 1) - 1) // 2

        fm.startitem()
        fm.write(
            b'hunk',
            b' %s\n',
            b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
            label=b'diff.hunk',
        )
        fm.data(path=self.path, linetype=b'hunk')

        def writeline(idx, diffchar, line, linetype, linelabel):
            fm.startitem()
            node = b''
            if idx:
                ctx = self.fctxs[idx]
                fm.context(fctx=ctx)
                node = ctx.hex()
                self.ctxaffected.add(ctx.changectx())
            fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node')
            fm.write(
                b'diffchar ' + linetype,
                b'%s%s\n',
                diffchar,
                line,
                label=linelabel,
            )
            fm.data(path=self.path, linetype=linetype)

        for i in pycompat.xrange(a1, a2):
            writeline(
                aidxs[i - a1],
                b'-',
                trim(alines[i]),
                b'deleted',
                b'diff.deleted',
            )
        for i in pycompat.xrange(b1, b2):
            writeline(
                bidxs[i - b1],
                b'+',
                trim(blines[i]),
                b'inserted',
                b'diff.inserted',
            )


class fixupstate:
    """state needed to run absorb

    internally, it keeps paths and filefixupstates.

    a typical use is like filefixupstates:

        1. call diffwith, to calculate fixups
        2. (optionally), present fixups to the user, or edit fixups
        3. call apply, to apply changes to memory
        4. call commit, to commit changes to hg database
    """

    def __init__(self, stack, ui=None, opts=None):
        """([ctx], ui or None) -> None

        stack: should be linear, and sorted by topo order - oldest first.
        all commits in stack are considered mutable.
        """
        assert stack
        self.ui = ui or nullui()
        self.opts = opts or {}
        self.stack = stack
        self.repo = stack[-1].repo().unfiltered()

        # following fields will be filled later
        self.paths = []  # [str]
        self.status = None  # ctx.status output
        self.fctxmap = {}  # {path: {ctx: fctx}}
        self.fixupmap = {}  # {path: filefixupstate}
        self.replacemap = {}  # {oldnode: newnode or None}
        self.finalnode = None  # head after all fixups
        self.ctxaffected = set()  # ctx that will be absorbed into

    def diffwith(self, targetctx, match=None, fm=None):
        """diff and prepare fixups. update self.fixupmap, self.paths"""
        # only care about modified files
        self.status = self.stack[-1].status(targetctx, match)
        self.paths = []
        # but if --edit-lines is used, the user may want to edit files
        # even if they are not modified
        editopt = self.opts.get(b'edit_lines')
        if not self.status.modified and editopt and match:
            interestingpaths = match.files()
        else:
            interestingpaths = self.status.modified
        # prepare the filefixupstate
        seenfctxs = set()
        # sorting is necessary to eliminate ambiguity for the "double move"
        # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
        for path in sorted(interestingpaths):
            self.ui.debug(b'calculating fixups for %s\n' % path)
            targetfctx = targetctx[path]
            fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
            # ignore symbolic links or binary, or unchanged files
            if any(
                f.islink() or stringutil.binary(f.data())
                for f in [targetfctx] + fctxs
                if not isinstance(f, emptyfilecontext)
            ):
                continue
            if targetfctx.data() == fctxs[-1].data() and not editopt:
                continue
            seenfctxs.update(fctxs[1:])
            self.fctxmap[path] = ctx2fctx
            fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
            if fm is not None:
                fm.startitem()
                fm.plain(b'showing changes for ')
                fm.write(b'path', b'%s\n', path, label=b'absorb.path')
                fm.data(linetype=b'path')
            fstate.diffwith(targetfctx, fm)
            self.fixupmap[path] = fstate
            self.paths.append(path)
            self.ctxaffected.update(fstate.ctxaffected)

    def apply(self):
        """apply fixups to individual filefixupstates"""
        for path, state in self.fixupmap.items():
            if self.ui.debugflag:
                self.ui.write(_(b'applying fixups to %s\n') % path)
            state.apply()

    @property
    def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
        return {path: state.chunkstats for path, state in self.fixupmap.items()}

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
        with self.repo.transaction(b'absorb') as tr:
            self._commitstack()
            self._movebookmarks(tr)
            if self.repo[b'.'].node() in self.replacemap:
                self._moveworkingdirectoryparent()
            self._cleanupoldcommits()
        return self.finalnode

    def printchunkstats(self):
        """print things like '1 of 2 chunk(s) applied'"""
        ui = self.ui
        chunkstats = self.chunkstats
        if ui.verbose:
            # chunkstats for each file
            for path, stat in chunkstats.items():
                if stat[0]:
                    ui.write(
                        _(b'%s: %d of %d chunk(s) applied\n')
                        % (path, stat[0], stat[1])
                    )
        elif not ui.quiet:
            # a summary for all files
            stats = chunkstats.values()
            applied, total = (sum(s[i] for s in stats) for i in (0, 1))
            ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total))

    def _commitstack(self):
        """make new commits. update self.finalnode, self.replacemap.
        it is split from "commit" to avoid too much indentation.
        """
        # last node (20-char) committed by us
        lastcommitted = None
        # p1 which overrides the parent of the next commit, "None" means use
        # the original parent unchanged
        nextp1 = None
        for ctx in self.stack:
            memworkingcopy = self._getnewfilecontents(ctx)
            if not memworkingcopy and not lastcommitted:
                # nothing changed, nothing committed
                nextp1 = ctx
                continue
            willbecomenoop = ctx.files() and self._willbecomenoop(
                memworkingcopy, ctx, nextp1
            )
            if self.skip_empty_successor and willbecomenoop:
                # changeset is no longer necessary
                self.replacemap[ctx.node()] = None
                msg = _(b'became empty and was dropped')
            else:
                # changeset needs re-commit
                nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
                lastcommitted = self.repo[nodestr]
                nextp1 = lastcommitted
                self.replacemap[ctx.node()] = lastcommitted.node()
                if memworkingcopy:
                    if willbecomenoop:
                        msg = _(b'%d file(s) changed, became empty as %s')
                    else:
                        msg = _(b'%d file(s) changed, became %s')
                    msg = msg % (
                        len(memworkingcopy),
                        self._ctx2str(lastcommitted),
                    )
                else:
                    msg = _(b'became %s') % self._ctx2str(lastcommitted)
            if self.ui.verbose and msg:
                self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg))
        self.finalnode = lastcommitted and lastcommitted.node()

    def _ctx2str(self, ctx):
        if self.ui.debugflag:
            return b'%d:%s' % (ctx.rev(), ctx.hex())
        else:
            return b'%d:%s' % (ctx.rev(), short(ctx.node()))

    def _getnewfilecontents(self, ctx):
        """(ctx) -> {path: str}

        fetch file contents from filefixupstates.
        return the working copy overrides - files different from ctx.
        """
        result = {}
        for path in self.paths:
            ctx2fctx = self.fctxmap[path]  # {ctx: fctx}
            if ctx not in ctx2fctx:
                continue
            fctx = ctx2fctx[ctx]
            content = fctx.data()
            newcontent = self.fixupmap[path].getfinalcontent(fctx)
            if content != newcontent:
                result[fctx.path()] = newcontent
        return result

    def _movebookmarks(self, tr):
        repo = self.repo
        needupdate = [
            (name, self.replacemap[hsh])
            for name, hsh in repo._bookmarks.items()
            if hsh in self.replacemap
        ]
        changes = []
        for name, hsh in needupdate:
            if hsh:
                changes.append((name, hsh))
                if self.ui.verbose:
                    self.ui.write(
                        _(b'moving bookmark %s to %s\n') % (name, hex(hsh))
                    )
            else:
                changes.append((name, None))
                if self.ui.verbose:
                    self.ui.write(_(b'deleting bookmark %s\n') % name)
        repo._bookmarks.applychanges(repo, tr, changes)

    def _moveworkingdirectoryparent(self):
        if not self.finalnode:
            # Find the latest not-{obsoleted,stripped} parent.
            revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys())
            ctx = self.repo[revs.first()]
            self.finalnode = ctx.node()
        else:
            ctx = self.repo[self.finalnode]

        dirstate = self.repo.dirstate
        # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
        # be slow. in absorb's case, no need to invalidate fsmonitorstate.
        noop = lambda: 0
        restore = noop
        if util.safehasattr(dirstate, '_fsmonitorstate'):
            bak = dirstate._fsmonitorstate.invalidate

            def restore():
                dirstate._fsmonitorstate.invalidate = bak

            dirstate._fsmonitorstate.invalidate = noop
        try:
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
        finally:
            restore()

    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        if ctx.branch() != pctx.branch():
            return False
        if ctx.extra().get(b'close'):
            return False
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy)):
            return False
        for path, content in memworkingcopy.items():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True
918
918
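Stripped of Mercurial's context objects, the predicate above asks one question: once the overrides are applied, does every file the commit touches end up identical to its parent? (The real method additionally bails out on branch changes, branch-closing commits and flag changes.) A rough sketch over plain dictionaries, with hypothetical data::

    def will_become_noop(overrides, ctx_files, parent_files):
        # overrides: {path: new content}; parent_files maps path -> content.
        if not set(ctx_files) <= set(overrides):
            return False      # the commit touches files we do not override
        return all(
            path in parent_files and parent_files[path] == content
            for path, content in overrides.items()
        )

    # 'a' is rewritten back to its parent's content, so the commit is empty:
    print(will_become_noop({'a': b'1\n'}, ['a'], {'a': b'1\n'}))  # True
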
    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """(ctx, {path: content}, node) -> node. make a single commit

        the commit is a clone from ctx, with a (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
        parents = p1 and (p1, self.repo.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
            extra[b'absorb_source'] = ctx.hex()

        desc = rewriteutil.update_hash_refs(
            ctx.repo(),
            ctx.description(),
            {
                oldnode: [newnode]
                for oldnode, newnode in self.replacemap.items()
            },
        )
        mctx = overlaycontext(
            memworkingcopy, ctx, parents, extra=extra, desc=desc
        )
        return mctx.commit()

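The mapping handed to rewriteutil.update_hash_refs above has the shape {old node: [successor nodes]}; absorb rewrites each changeset into a single successor, so every value is a one-element list. _cleanupoldcommits below builds the same shape, but with an empty list for changesets that were dropped entirely. A hypothetical illustration of that reshaping::

    replacemap = {b'1111': b'aaaa', b'2222': None}   # None: changeset dropped

    replacements = {
        old: ([new] if new is not None else [])
        for old, new in replacemap.items()
    }
    print(replacements)   # {b'1111': [b'aaaa'], b'2222': []}
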
    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _cleanupoldcommits(self):
        replacements = {
            k: ([v] if v is not None else [])
            for k, v in self.replacemap.items()
        }
        if replacements:
            scmutil.cleanupnodes(
                self.repo, replacements, operation=b'absorb', fixphase=True
            )

    @util.propertycache
    def skip_empty_successor(self):
        return rewriteutil.skip_empty_successor(self.ui, b'absorb')


def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')]
    return path, (a1, a2, blines)

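The (a1, a2, [bline]) triple returned above encodes a hunk as "replace the 0-based, end-exclusive line range [a1, a2) of the old file with these new lines"; context lines have already been stripped, so only the changed region remains. A tiny, hypothetical illustration of what one such triple means::

    old = [b'1\n', b'2\n', b'3\n', b'4\n']
    a1, a2, blines = 1, 3, [b'two\n', b'two & a half\n']   # rewrite old lines 2-3

    new = old[:]
    new[a1:a2] = blines
    print(b''.join(new))   # b'1\ntwo\ntwo & a half\n4\n'
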
def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {}  # {path: content}
    patchmap = defaultdict(lambda: [])  # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.items():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = b''.join(lines)
    return overlaycontext(memworkingcopy, ctx)

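Note the patches.sort(reverse=True) above: hunks are spliced in from the bottom of the file upwards, so applying one hunk never shifts the line offsets of the hunks that are still pending. A self-contained sketch of that inner loop, with made-up data::

    lines = [b'a\n', b'b\n', b'c\n', b'd\n', b'e\n']
    patches = [(0, 1, [b'A\n']), (3, 5, [b'D\n'])]   # two non-overlapping hunks

    for a1, a2, blines in sorted(patches, reverse=True):
        lines[a1:a2] = blines      # later ranges first; earlier offsets stay valid

    print(b''.join(lines))   # b'A\nb\nc\nD\n'
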
def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint(b'absorb', b'max-stack-size')
        headctx = repo[b'.']
        if len(headctx.parents()) > 1:
            raise error.InputError(_(b'cannot absorb into a merge'))
        stack = getdraftstack(headctx, limit)
        if limit and len(stack) >= limit:
            ui.warn(
                _(
                    b'absorb: only the recent %d changesets will '
                    b'be analysed\n'
                )
                % limit
            )
    if not stack:
        raise error.InputError(_(b'no mutable changeset to change'))
    if targetctx is None:  # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get(b'interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
+    if opts.get(b'edit_lines'):
+        # If we're going to open the editor, don't ask the user to confirm
+        # first
+        opts[b'apply_changes'] = True
    fm = None
    if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
        fm = ui.formatter(b'absorb', opts)
    state.diffwith(targetctx, matcher, fm)
    if fm is not None:
        fm.startitem()
        fm.write(
            b"count", b"\n%d changesets affected\n", len(state.ctxaffected)
        )
        fm.data(linetype=b'summary')
        for ctx in reversed(stack):
            if ctx not in state.ctxaffected:
                continue
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(linetype=b'changeset')
            fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
            descfirstline = stringutil.firstline(ctx.description())
            fm.write(
                b'descfirstline',
                b'%s\n',
                descfirstline,
                label=b'absorb.description',
            )
        fm.end()
    if not opts.get(b'dry_run'):
        if (
            not opts.get(b'apply_changes')
            and state.ctxaffected
            and ui.promptchoice(
                b"apply changes (y/N)? $$ &Yes $$ &No", default=1
            )
        ):
            raise error.CanceledError(_(b'absorb cancelled\n'))

        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_(b'nothing applied\n'))
    return state

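The four added lines (marked with "+") are the heart of this change: when --edit-lines is going to open an editor anyway, apply_changes is forced on, which both removes the "apply changes (y/N)?" prompt and, unless --print-changes is given, skips creating the formatter that prints the plan. A condensed, hypothetical sketch of that decision logic::

    def summarize(opts):
        # opts: plain dict standing in for the parsed command-line flags
        if opts.get('edit_lines'):
            opts['apply_changes'] = True   # the editor replaces the confirmation
        show_plan = bool(
            opts.get('print_changes') or not opts.get('apply_changes')
        )
        prompt = not opts.get('apply_changes')
        return show_plan, prompt

    print(summarize({'edit_lines': True}))                         # (False, False)
    print(summarize({}))                                           # (True, True)
    print(summarize({'edit_lines': True, 'print_changes': True}))  # (True, False)
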
@command(
    b'absorb',
    [
        (
            b'a',
            b'apply-changes',
            None,
            _(b'apply changes without prompting for confirmation'),
        ),
        (
            b'p',
            b'print-changes',
            None,
            _(b'always print which changesets are modified by which changes'),
        ),
        (
            b'i',
            b'interactive',
            None,
            _(b'interactively select which chunks to apply'),
        ),
        (
            b'e',
            b'edit-lines',
            None,
            _(
                b'edit what lines belong to which changesets before commit '
                b'(EXPERIMENTAL)'
            ),
        ),
    ]
    + commands.dryrunopts
    + commands.templateopts
    + commands.walkopts,
    _(b'hg absorb [OPTION] [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True,
)
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. They can be
    observed by :hg:`status` or :hg:`diff` afterwards. In other words,
    absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    By default, absorb will show what it plans to do and prompt for
    confirmation. If you are confident that the changes will be absorbed
    to the correct place, run :hg:`absorb -a` to apply the changes
    immediately.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    opts = pycompat.byteskwargs(opts)

    with repo.wlock(), repo.lock():
        if not opts[b'dry_run']:
            cmdutil.checkunfinished(repo)

        state = absorb(ui, repo, pats=pats, opts=opts)
        if sum(s[0] for s in state.chunkstats.values()) == 0:
            return 1

@@ -1,61 +1,61 @@
  $ cat >> $HGRCPATH << EOF
  > [extensions]
  > absorb=
  > EOF

  $ hg init repo1
  $ cd repo1

Make some commits:

  $ for i in 1 2 3; do
  > echo $i >> a
  > hg commit -A a -m "commit $i" -q
  > done

absorb --edit-lines will run the editor if filename is provided:

-  $ hg absorb --edit-lines --apply-changes
+  $ hg absorb --edit-lines
  nothing applied
  [1]
-  $ HGEDITOR=cat hg absorb --edit-lines --apply-changes a
+  $ HGEDITOR=cat hg absorb --edit-lines a
  HG: editing a
  HG: "y" means the line to the right exists in the changeset to the top
  HG:
  HG: /---- 4ec16f85269a commit 1
  HG: |/--- 5c5f95224a50 commit 2
  HG: ||/-- 43f0a75bede7 commit 3
  HG: |||
  yyy : 1
  yy : 2
  y : 3
  nothing applied
  [1]

Edit the file using --edit-lines:

  $ cat > editortext << EOF
  > y : a
  > yy : b
  > y : c
  > yy : d
  > y y : e
  > y : f
  > yyy : g
  > EOF
-  $ HGEDITOR='cat editortext >' hg absorb -q --edit-lines --apply-changes a
+  $ HGEDITOR='cat editortext >' hg absorb -q --edit-lines a
  $ hg cat -r 0 a
  d
  e
  f
  g
  $ hg cat -r 1 a
  b
  c
  d
  g
  $ hg cat -r 2 a
  a
  b
  e
  g
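
In the editor transcript above, each column of the prefix corresponds to one changeset of the stack (leftmost column = the changeset listed at the top of the HG: header), and a "y" means the line to the right should end up in that changeset; for instance "y y : e" assigns line "e" to commits 1 and 3. A rough sketch of that interpretation, purely for illustration (this is not absorb's actual parser)::

    stack = ['commit 1', 'commit 2', 'commit 3']   # column order from the header

    def parse_mask_line(line):
        mask, _, content = line.partition(' : ')
        return content, [rev for rev, ch in zip(stack, mask) if ch == 'y']

    print(parse_mask_line('y y : e'))   # ('e', ['commit 1', 'commit 3'])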