absorb: run cleanupnodes() within transaction also when not using obsmarkers...
Martin von Zweigbergk
r41996:c6a5009e default
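The change is easy to miss in the full-file diff below: the obsmarker check around _cleanupoldcommits() is removed, so scmutil.cleanupnodes() now runs inside the 'absorb' transaction whether or not obsmarkers are enabled. A condensed sketch of fixupstate.commit() after the change (all names taken from the diff below; indentation reconstructed, since this rendering strips it):

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
        with self.repo.wlock(), self.repo.lock():
            with self.repo.transaction('absorb') as tr:
                self._commitstack()
                self._movebookmarks(tr)
                if self.repo['.'].node() in self.replacemap:
                    self._moveworkingdirectoryparent()
                # was: called here only when obsmarkers were enabled; the
                # strip fallback used to run after the transaction closed
                self._cleanupoldcommits()
            return self.finalnode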
@@ -1,1014 +1,1011 @@
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 max-stack-size = 50
17 max-stack-size = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 add-noise = 1
19 add-noise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amend-flag = correlated
21 amend-flag = correlated
22
22
23 [color]
23 [color]
24 absorb.description = yellow
24 absorb.description = yellow
25 absorb.node = blue bold
25 absorb.node = blue bold
26 absorb.path = bold
26 absorb.path = bold
27 """
27 """
28
28
29 # TODO:
29 # TODO:
30 # * Rename config items to [commands] namespace
30 # * Rename config items to [commands] namespace
31 # * Converge getdraftstack() with other code in core
31 # * Converge getdraftstack() with other code in core
32 # * move many attributes on fixupstate to be private
32 # * move many attributes on fixupstate to be private
33
33
34 from __future__ import absolute_import
34 from __future__ import absolute_import
35
35
36 import collections
36 import collections
37
37
38 from mercurial.i18n import _
38 from mercurial.i18n import _
39 from mercurial import (
39 from mercurial import (
40 cmdutil,
40 cmdutil,
41 commands,
41 commands,
42 context,
42 context,
43 crecord,
43 crecord,
44 error,
44 error,
45 linelog,
45 linelog,
46 mdiff,
46 mdiff,
47 node,
47 node,
48 obsolete,
48 obsolete,
49 patch,
49 patch,
50 phases,
50 phases,
51 pycompat,
51 pycompat,
52 registrar,
52 registrar,
53 scmutil,
53 scmutil,
54 util,
54 util,
55 )
55 )
56 from mercurial.utils import (
56 from mercurial.utils import (
57 stringutil,
57 stringutil,
58 )
58 )
59
59
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # be specifying the version(s) of Mercurial they are tested with, or
62 # be specifying the version(s) of Mercurial they are tested with, or
63 # leave the attribute unspecified.
63 # leave the attribute unspecified.
64 testedwith = 'ships-with-hg-core'
64 testedwith = 'ships-with-hg-core'
65
65
66 cmdtable = {}
66 cmdtable = {}
67 command = registrar.command(cmdtable)
67 command = registrar.command(cmdtable)
68
68
69 configtable = {}
69 configtable = {}
70 configitem = registrar.configitem(configtable)
70 configitem = registrar.configitem(configtable)
71
71
72 configitem('absorb', 'add-noise', default=True)
72 configitem('absorb', 'add-noise', default=True)
73 configitem('absorb', 'amend-flag', default=None)
73 configitem('absorb', 'amend-flag', default=None)
74 configitem('absorb', 'max-stack-size', default=50)
74 configitem('absorb', 'max-stack-size', default=50)
75
75
76 colortable = {
76 colortable = {
77 'absorb.description': 'yellow',
77 'absorb.description': 'yellow',
78 'absorb.node': 'blue bold',
78 'absorb.node': 'blue bold',
79 'absorb.path': 'bold',
79 'absorb.path': 'bold',
80 }
80 }
81
81
82 defaultdict = collections.defaultdict
82 defaultdict = collections.defaultdict
83
83
84 class nullui(object):
84 class nullui(object):
85 """blank ui object doing nothing"""
85 """blank ui object doing nothing"""
86 debugflag = False
86 debugflag = False
87 verbose = False
87 verbose = False
88 quiet = True
88 quiet = True
89
89
90 def __getitem__(name):
90 def __getitem__(name):
91 def nullfunc(*args, **kwds):
91 def nullfunc(*args, **kwds):
92 return
92 return
93 return nullfunc
93 return nullfunc
94
94
95 class emptyfilecontext(object):
95 class emptyfilecontext(object):
96 """minimal filecontext representing an empty file"""
96 """minimal filecontext representing an empty file"""
97 def data(self):
97 def data(self):
98 return ''
98 return ''
99
99
100 def node(self):
100 def node(self):
101 return node.nullid
101 return node.nullid
102
102
103 def uniq(lst):
103 def uniq(lst):
104 """list -> list. remove duplicated items without changing the order"""
104 """list -> list. remove duplicated items without changing the order"""
105 seen = set()
105 seen = set()
106 result = []
106 result = []
107 for x in lst:
107 for x in lst:
108 if x not in seen:
108 if x not in seen:
109 seen.add(x)
109 seen.add(x)
110 result.append(x)
110 result.append(x)
111 return result
111 return result
112
112
113 def getdraftstack(headctx, limit=None):
113 def getdraftstack(headctx, limit=None):
114 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
114 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
115
115
116 changesets are sorted in topo order, oldest first.
116 changesets are sorted in topo order, oldest first.
117 return at most limit items, if limit is a positive number.
117 return at most limit items, if limit is a positive number.
118
118
119 merges are also considered non-draft, i.e. every commit
119 merges are also considered non-draft, i.e. every commit
120 returned has exactly one parent.
120 returned has exactly one parent.
121 """
121 """
122 ctx = headctx
122 ctx = headctx
123 result = []
123 result = []
124 while ctx.phase() != phases.public:
124 while ctx.phase() != phases.public:
125 if limit and len(result) >= limit:
125 if limit and len(result) >= limit:
126 break
126 break
127 parents = ctx.parents()
127 parents = ctx.parents()
128 if len(parents) != 1:
128 if len(parents) != 1:
129 break
129 break
130 result.append(ctx)
130 result.append(ctx)
131 ctx = parents[0]
131 ctx = parents[0]
132 result.reverse()
132 result.reverse()
133 return result
133 return result
134
134
135 def getfilestack(stack, path, seenfctxs=None):
135 def getfilestack(stack, path, seenfctxs=None):
136 """([ctx], str, set) -> [fctx], {ctx: fctx}
136 """([ctx], str, set) -> [fctx], {ctx: fctx}
137
137
138 stack is a list of contexts, from old to new. usually they are what
138 stack is a list of contexts, from old to new. usually they are what
139 "getdraftstack" returns.
139 "getdraftstack" returns.
140
140
141 follows renames, but not copies.
141 follows renames, but not copies.
142
142
143 seenfctxs is a set of filecontexts that will be considered "immutable".
143 seenfctxs is a set of filecontexts that will be considered "immutable".
144 they are usually what this function returned in earlier calls, useful
144 they are usually what this function returned in earlier calls, useful
145 to avoid issues where a file was "moved" to multiple places and was then
145 to avoid issues where a file was "moved" to multiple places and was then
146 modified differently, like: "a" was copied to "b", "a" was also copied to
146 modified differently, like: "a" was copied to "b", "a" was also copied to
147 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
147 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
148 and we enforce only one of them to be able to affect "a"'s content.
148 and we enforce only one of them to be able to affect "a"'s content.
149
149
150 return an empty list and an empty dict, if the specified path does not
150 return an empty list and an empty dict, if the specified path does not
151 exist in stack[-1] (the top of the stack).
151 exist in stack[-1] (the top of the stack).
152
152
153 otherwise, return a list of de-duplicated filecontexts, and the map to
153 otherwise, return a list of de-duplicated filecontexts, and the map to
154 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
154 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
155 of the list would be outside the stack and should be considered immutable.
155 of the list would be outside the stack and should be considered immutable.
156 the remaining items are within the stack.
156 the remaining items are within the stack.
157
157
158 for example, given the following changelog and corresponding filelog
158 for example, given the following changelog and corresponding filelog
159 revisions:
159 revisions:
160
160
161 changelog: 3----4----5----6----7
161 changelog: 3----4----5----6----7
162 filelog: x 0----1----1----2 (x: no such file yet)
162 filelog: x 0----1----1----2 (x: no such file yet)
163
163
164 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
164 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
165 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
165 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
166 dummy empty filecontext.
166 dummy empty filecontext.
167 - if stack = [2], returns ([], {})
167 - if stack = [2], returns ([], {})
168 - if stack = [7], returns ([1, 2], {7: 2})
168 - if stack = [7], returns ([1, 2], {7: 2})
169 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
169 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
170 removed, since 1 is immutable.
170 removed, since 1 is immutable.
171 """
171 """
172 if seenfctxs is None:
172 if seenfctxs is None:
173 seenfctxs = set()
173 seenfctxs = set()
174 assert stack
174 assert stack
175
175
176 if path not in stack[-1]:
176 if path not in stack[-1]:
177 return [], {}
177 return [], {}
178
178
179 fctxs = []
179 fctxs = []
180 fctxmap = {}
180 fctxmap = {}
181
181
182 pctx = stack[0].p1() # the public (immutable) ctx we stop at
182 pctx = stack[0].p1() # the public (immutable) ctx we stop at
183 for ctx in reversed(stack):
183 for ctx in reversed(stack):
184 if path not in ctx: # the file is added in the next commit
184 if path not in ctx: # the file is added in the next commit
185 pctx = ctx
185 pctx = ctx
186 break
186 break
187 fctx = ctx[path]
187 fctx = ctx[path]
188 fctxs.append(fctx)
188 fctxs.append(fctx)
189 if fctx in seenfctxs: # treat fctx as the immutable one
189 if fctx in seenfctxs: # treat fctx as the immutable one
190 pctx = None # do not add another immutable fctx
190 pctx = None # do not add another immutable fctx
191 break
191 break
192 fctxmap[ctx] = fctx # only for mutable fctxs
192 fctxmap[ctx] = fctx # only for mutable fctxs
193 copy = fctx.copysource()
193 copy = fctx.copysource()
194 if copy:
194 if copy:
195 path = copy # follow rename
195 path = copy # follow rename
196 if path in ctx: # but do not follow copy
196 if path in ctx: # but do not follow copy
197 pctx = ctx.p1()
197 pctx = ctx.p1()
198 break
198 break
199
199
200 if pctx is not None: # need an extra immutable fctx
200 if pctx is not None: # need an extra immutable fctx
201 if path in pctx:
201 if path in pctx:
202 fctxs.append(pctx[path])
202 fctxs.append(pctx[path])
203 else:
203 else:
204 fctxs.append(emptyfilecontext())
204 fctxs.append(emptyfilecontext())
205
205
206 fctxs.reverse()
206 fctxs.reverse()
207 # note: we rely on a property of hg: filerev is not reused for linear
207 # note: we rely on a property of hg: filerev is not reused for linear
208 # history. i.e. it's impossible to have:
208 # history. i.e. it's impossible to have:
209 # changelog: 4----5----6 (linear, no merges)
209 # changelog: 4----5----6 (linear, no merges)
210 # filelog: 1----2----1
210 # filelog: 1----2----1
211 # ^ reuse filerev (impossible)
211 # ^ reuse filerev (impossible)
212 # because parents are part of the hash. if that's not true, we need to
212 # because parents are part of the hash. if that's not true, we need to
213 # remove uniq and find a different way to identify fctxs.
213 # remove uniq and find a different way to identify fctxs.
214 return uniq(fctxs), fctxmap
214 return uniq(fctxs), fctxmap
215
215
216 class overlaystore(patch.filestore):
216 class overlaystore(patch.filestore):
217 """read-only, hybrid store based on a dict and ctx.
217 """read-only, hybrid store based on a dict and ctx.
218 memworkingcopy: {path: content}, overrides file contents.
218 memworkingcopy: {path: content}, overrides file contents.
219 """
219 """
220 def __init__(self, basectx, memworkingcopy):
220 def __init__(self, basectx, memworkingcopy):
221 self.basectx = basectx
221 self.basectx = basectx
222 self.memworkingcopy = memworkingcopy
222 self.memworkingcopy = memworkingcopy
223
223
224 def getfile(self, path):
224 def getfile(self, path):
225 """comply with mercurial.patch.filestore.getfile"""
225 """comply with mercurial.patch.filestore.getfile"""
226 if path not in self.basectx:
226 if path not in self.basectx:
227 return None, None, None
227 return None, None, None
228 fctx = self.basectx[path]
228 fctx = self.basectx[path]
229 if path in self.memworkingcopy:
229 if path in self.memworkingcopy:
230 content = self.memworkingcopy[path]
230 content = self.memworkingcopy[path]
231 else:
231 else:
232 content = fctx.data()
232 content = fctx.data()
233 mode = (fctx.islink(), fctx.isexec())
233 mode = (fctx.islink(), fctx.isexec())
234 copy = fctx.copysource()
234 copy = fctx.copysource()
235 return content, mode, copy
235 return content, mode, copy
236
236
237 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
237 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
238 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
238 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
239 memworkingcopy overrides file contents.
239 memworkingcopy overrides file contents.
240 """
240 """
241 # parents must contain 2 items: (node1, node2)
241 # parents must contain 2 items: (node1, node2)
242 if parents is None:
242 if parents is None:
243 parents = ctx.repo().changelog.parents(ctx.node())
243 parents = ctx.repo().changelog.parents(ctx.node())
244 if extra is None:
244 if extra is None:
245 extra = ctx.extra()
245 extra = ctx.extra()
246 date = ctx.date()
246 date = ctx.date()
247 desc = ctx.description()
247 desc = ctx.description()
248 user = ctx.user()
248 user = ctx.user()
249 files = set(ctx.files()).union(memworkingcopy)
249 files = set(ctx.files()).union(memworkingcopy)
250 store = overlaystore(ctx, memworkingcopy)
250 store = overlaystore(ctx, memworkingcopy)
251 return context.memctx(
251 return context.memctx(
252 repo=ctx.repo(), parents=parents, text=desc,
252 repo=ctx.repo(), parents=parents, text=desc,
253 files=files, filectxfn=store, user=user, date=date,
253 files=files, filectxfn=store, user=user, date=date,
254 branch=None, extra=extra)
254 branch=None, extra=extra)
255
255
256 class filefixupstate(object):
256 class filefixupstate(object):
257 """state needed to apply fixups to a single file
257 """state needed to apply fixups to a single file
258
258
259 internally, it keeps file contents of several revisions and a linelog.
259 internally, it keeps file contents of several revisions and a linelog.
260
260
261 the linelog uses odd revision numbers for original contents (fctxs passed
261 the linelog uses odd revision numbers for original contents (fctxs passed
262 to __init__), and even revision numbers for fixups, like:
262 to __init__), and even revision numbers for fixups, like:
263
263
264 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
264 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
265 linelog rev 2: fixups made to self.fctxs[0]
265 linelog rev 2: fixups made to self.fctxs[0]
266 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
266 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
267 linelog rev 4: fixups made to self.fctxs[1]
267 linelog rev 4: fixups made to self.fctxs[1]
268 ...
268 ...
269
269
270 a typical use is like:
270 a typical use is like:
271
271
272 1. call diffwith, to calculate self.fixups
272 1. call diffwith, to calculate self.fixups
273 2. (optionally), present self.fixups to the user, or change it
273 2. (optionally), present self.fixups to the user, or change it
274 3. call apply, to apply changes
274 3. call apply, to apply changes
275 4. read results from "finalcontents", or call getfinalcontent
275 4. read results from "finalcontents", or call getfinalcontent
276 """
276 """
277
277
278 def __init__(self, fctxs, path, ui=None, opts=None):
278 def __init__(self, fctxs, path, ui=None, opts=None):
279 """([fctx], ui or None) -> None
279 """([fctx], ui or None) -> None
280
280
281 fctxs should be linear, and sorted by topo order - oldest first.
281 fctxs should be linear, and sorted by topo order - oldest first.
282 fctxs[0] will be considered as "immutable" and will not be changed.
282 fctxs[0] will be considered as "immutable" and will not be changed.
283 """
283 """
284 self.fctxs = fctxs
284 self.fctxs = fctxs
285 self.path = path
285 self.path = path
286 self.ui = ui or nullui()
286 self.ui = ui or nullui()
287 self.opts = opts or {}
287 self.opts = opts or {}
288
288
289 # following fields are built from fctxs. they exist for perf reasons
289 # following fields are built from fctxs. they exist for perf reasons
290 self.contents = [f.data() for f in fctxs]
290 self.contents = [f.data() for f in fctxs]
291 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
291 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
292 self.linelog = self._buildlinelog()
292 self.linelog = self._buildlinelog()
293 if self.ui.debugflag:
293 if self.ui.debugflag:
294 assert self._checkoutlinelog() == self.contents
294 assert self._checkoutlinelog() == self.contents
295
295
296 # following fields will be filled later
296 # following fields will be filled later
297 self.chunkstats = [0, 0] # [adopted, total : int]
297 self.chunkstats = [0, 0] # [adopted, total : int]
298 self.targetlines = [] # [str]
298 self.targetlines = [] # [str]
299 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
299 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
300 self.finalcontents = [] # [str]
300 self.finalcontents = [] # [str]
301 self.ctxaffected = set()
301 self.ctxaffected = set()
302
302
303 def diffwith(self, targetfctx, fm=None):
303 def diffwith(self, targetfctx, fm=None):
304 """calculate fixups needed by examining the differences between
304 """calculate fixups needed by examining the differences between
305 self.fctxs[-1] and targetfctx, chunk by chunk.
305 self.fctxs[-1] and targetfctx, chunk by chunk.
306
306
307 targetfctx is the target state we move towards. we may or may not be
307 targetfctx is the target state we move towards. we may or may not be
308 able to get there because not all modified chunks can be amended into
308 able to get there because not all modified chunks can be amended into
309 a non-public fctx unambiguously.
309 a non-public fctx unambiguously.
310
310
311 call this only once, before apply().
311 call this only once, before apply().
312
312
313 update self.fixups, self.chunkstats, and self.targetlines.
313 update self.fixups, self.chunkstats, and self.targetlines.
314 """
314 """
315 a = self.contents[-1]
315 a = self.contents[-1]
316 alines = self.contentlines[-1]
316 alines = self.contentlines[-1]
317 b = targetfctx.data()
317 b = targetfctx.data()
318 blines = mdiff.splitnewlines(b)
318 blines = mdiff.splitnewlines(b)
319 self.targetlines = blines
319 self.targetlines = blines
320
320
321 self.linelog.annotate(self.linelog.maxrev)
321 self.linelog.annotate(self.linelog.maxrev)
322 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
322 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
323 assert len(annotated) == len(alines)
323 assert len(annotated) == len(alines)
324 # add a dummy end line to make insertion at the end easier
324 # add a dummy end line to make insertion at the end easier
325 if annotated:
325 if annotated:
326 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
326 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
327 annotated.append(dummyendline)
327 annotated.append(dummyendline)
328
328
329 # analyse diff blocks
329 # analyse diff blocks
330 for chunk in self._alldiffchunks(a, b, alines, blines):
330 for chunk in self._alldiffchunks(a, b, alines, blines):
331 newfixups = self._analysediffchunk(chunk, annotated)
331 newfixups = self._analysediffchunk(chunk, annotated)
332 self.chunkstats[0] += bool(newfixups) # 1 or 0
332 self.chunkstats[0] += bool(newfixups) # 1 or 0
333 self.chunkstats[1] += 1
333 self.chunkstats[1] += 1
334 self.fixups += newfixups
334 self.fixups += newfixups
335 if fm is not None:
335 if fm is not None:
336 self._showchanges(fm, alines, blines, chunk, newfixups)
336 self._showchanges(fm, alines, blines, chunk, newfixups)
337
337
338 def apply(self):
338 def apply(self):
339 """apply self.fixups. update self.linelog, self.finalcontents.
339 """apply self.fixups. update self.linelog, self.finalcontents.
340
340
341 call this only once, before getfinalcontent(), after diffwith().
341 call this only once, before getfinalcontent(), after diffwith().
342 """
342 """
343 # the following is unnecessary, as it's done by "diffwith":
343 # the following is unnecessary, as it's done by "diffwith":
344 # self.linelog.annotate(self.linelog.maxrev)
344 # self.linelog.annotate(self.linelog.maxrev)
345 for rev, a1, a2, b1, b2 in reversed(self.fixups):
345 for rev, a1, a2, b1, b2 in reversed(self.fixups):
346 blines = self.targetlines[b1:b2]
346 blines = self.targetlines[b1:b2]
347 if self.ui.debugflag:
347 if self.ui.debugflag:
348 idx = (max(rev - 1, 0)) // 2
348 idx = (max(rev - 1, 0)) // 2
349 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
349 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
350 % (node.short(self.fctxs[idx].node()),
350 % (node.short(self.fctxs[idx].node()),
351 a1, a2, len(blines)))
351 a1, a2, len(blines)))
352 self.linelog.replacelines(rev, a1, a2, b1, b2)
352 self.linelog.replacelines(rev, a1, a2, b1, b2)
353 if self.opts.get('edit_lines', False):
353 if self.opts.get('edit_lines', False):
354 self.finalcontents = self._checkoutlinelogwithedits()
354 self.finalcontents = self._checkoutlinelogwithedits()
355 else:
355 else:
356 self.finalcontents = self._checkoutlinelog()
356 self.finalcontents = self._checkoutlinelog()
357
357
358 def getfinalcontent(self, fctx):
358 def getfinalcontent(self, fctx):
359 """(fctx) -> str. get modified file content for a given filecontext"""
359 """(fctx) -> str. get modified file content for a given filecontext"""
360 idx = self.fctxs.index(fctx)
360 idx = self.fctxs.index(fctx)
361 return self.finalcontents[idx]
361 return self.finalcontents[idx]
362
362
363 def _analysediffchunk(self, chunk, annotated):
363 def _analysediffchunk(self, chunk, annotated):
364 """analyse a different chunk and return new fixups found
364 """analyse a different chunk and return new fixups found
365
365
366 return [] if no lines from the chunk can be safely applied.
366 return [] if no lines from the chunk can be safely applied.
367
367
368 the chunk (or lines) cannot be safely applied, if, for example:
368 the chunk (or lines) cannot be safely applied, if, for example:
369 - the modified (deleted) lines belong to a public changeset
369 - the modified (deleted) lines belong to a public changeset
370 (self.fctxs[0])
370 (self.fctxs[0])
371 - the chunk is a pure insertion and the adjacent lines (at most 2
371 - the chunk is a pure insertion and the adjacent lines (at most 2
372 lines) belong to different non-public changesets, or do not belong
372 lines) belong to different non-public changesets, or do not belong
373 to any non-public changesets.
373 to any non-public changesets.
374 - the chunk is modifying lines from different changesets.
374 - the chunk is modifying lines from different changesets.
375 in this case, if the number of lines deleted equals the number
375 in this case, if the number of lines deleted equals the number
376 of lines added, assume it's a simple 1:1 map (could be wrong).
376 of lines added, assume it's a simple 1:1 map (could be wrong).
377 otherwise, give up.
377 otherwise, give up.
378 - the chunk is modifying lines from a single non-public changeset,
378 - the chunk is modifying lines from a single non-public changeset,
379 but other revisions touch the area as well. i.e. the lines are
379 but other revisions touch the area as well. i.e. the lines are
380 not continuous as seen from the linelog.
380 not continuous as seen from the linelog.
381 """
381 """
382 a1, a2, b1, b2 = chunk
382 a1, a2, b1, b2 = chunk
383 # find involved indexes from annotate result
383 # find involved indexes from annotate result
384 involved = annotated[a1:a2]
384 involved = annotated[a1:a2]
385 if not involved and annotated: # a1 == a2 and a is not empty
385 if not involved and annotated: # a1 == a2 and a is not empty
386 # pure insertion, check nearby lines. ignore lines belonging
386 # pure insertion, check nearby lines. ignore lines belonging
387 # to the public (first) changeset (i.e. annotated[i][0] == 1)
387 # to the public (first) changeset (i.e. annotated[i][0] == 1)
388 nearbylinenums = {a2, max(0, a1 - 1)}
388 nearbylinenums = {a2, max(0, a1 - 1)}
389 involved = [annotated[i]
389 involved = [annotated[i]
390 for i in nearbylinenums if annotated[i][0] != 1]
390 for i in nearbylinenums if annotated[i][0] != 1]
391 involvedrevs = list(set(r for r, l in involved))
391 involvedrevs = list(set(r for r, l in involved))
392 newfixups = []
392 newfixups = []
393 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
393 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
394 # chunk belongs to a single revision
394 # chunk belongs to a single revision
395 rev = involvedrevs[0]
395 rev = involvedrevs[0]
396 if rev > 1:
396 if rev > 1:
397 fixuprev = rev + 1
397 fixuprev = rev + 1
398 newfixups.append((fixuprev, a1, a2, b1, b2))
398 newfixups.append((fixuprev, a1, a2, b1, b2))
399 elif a2 - a1 == b2 - b1 or b1 == b2:
399 elif a2 - a1 == b2 - b1 or b1 == b2:
400 # 1:1 line mapping, or chunk was deleted
400 # 1:1 line mapping, or chunk was deleted
401 for i in pycompat.xrange(a1, a2):
401 for i in pycompat.xrange(a1, a2):
402 rev, linenum = annotated[i]
402 rev, linenum = annotated[i]
403 if rev > 1:
403 if rev > 1:
404 if b1 == b2: # deletion, simply remove that single line
404 if b1 == b2: # deletion, simply remove that single line
405 nb1 = nb2 = 0
405 nb1 = nb2 = 0
406 else: # 1:1 line mapping, change the corresponding rev
406 else: # 1:1 line mapping, change the corresponding rev
407 nb1 = b1 + i - a1
407 nb1 = b1 + i - a1
408 nb2 = nb1 + 1
408 nb2 = nb1 + 1
409 fixuprev = rev + 1
409 fixuprev = rev + 1
410 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
410 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
411 return self._optimizefixups(newfixups)
411 return self._optimizefixups(newfixups)
412
412
413 @staticmethod
413 @staticmethod
414 def _alldiffchunks(a, b, alines, blines):
414 def _alldiffchunks(a, b, alines, blines):
415 """like mdiff.allblocks, but only care about differences"""
415 """like mdiff.allblocks, but only care about differences"""
416 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
416 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
417 for chunk, btype in blocks:
417 for chunk, btype in blocks:
418 if btype != '!':
418 if btype != '!':
419 continue
419 continue
420 yield chunk
420 yield chunk
421
421
422 def _buildlinelog(self):
422 def _buildlinelog(self):
423 """calculate the initial linelog based on self.content{,line}s.
423 """calculate the initial linelog based on self.content{,line}s.
424 this is similar to running a partial "annotate".
424 this is similar to running a partial "annotate".
425 """
425 """
426 llog = linelog.linelog()
426 llog = linelog.linelog()
427 a, alines = '', []
427 a, alines = '', []
428 for i in pycompat.xrange(len(self.contents)):
428 for i in pycompat.xrange(len(self.contents)):
429 b, blines = self.contents[i], self.contentlines[i]
429 b, blines = self.contents[i], self.contentlines[i]
430 llrev = i * 2 + 1
430 llrev = i * 2 + 1
431 chunks = self._alldiffchunks(a, b, alines, blines)
431 chunks = self._alldiffchunks(a, b, alines, blines)
432 for a1, a2, b1, b2 in reversed(list(chunks)):
432 for a1, a2, b1, b2 in reversed(list(chunks)):
433 llog.replacelines(llrev, a1, a2, b1, b2)
433 llog.replacelines(llrev, a1, a2, b1, b2)
434 a, alines = b, blines
434 a, alines = b, blines
435 return llog
435 return llog
436
436
437 def _checkoutlinelog(self):
437 def _checkoutlinelog(self):
438 """() -> [str]. check out file contents from linelog"""
438 """() -> [str]. check out file contents from linelog"""
439 contents = []
439 contents = []
440 for i in pycompat.xrange(len(self.contents)):
440 for i in pycompat.xrange(len(self.contents)):
441 rev = (i + 1) * 2
441 rev = (i + 1) * 2
442 self.linelog.annotate(rev)
442 self.linelog.annotate(rev)
443 content = ''.join(map(self._getline, self.linelog.annotateresult))
443 content = ''.join(map(self._getline, self.linelog.annotateresult))
444 contents.append(content)
444 contents.append(content)
445 return contents
445 return contents
446
446
447 def _checkoutlinelogwithedits(self):
447 def _checkoutlinelogwithedits(self):
448 """() -> [str]. prompt all lines for edit"""
448 """() -> [str]. prompt all lines for edit"""
449 alllines = self.linelog.getalllines()
449 alllines = self.linelog.getalllines()
450 # header
450 # header
451 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
451 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
452 'exists in the changeset to the top\nHG:\n')
452 'exists in the changeset to the top\nHG:\n')
453 % self.fctxs[-1].path())
453 % self.fctxs[-1].path())
454 # [(idx, fctx)]. hide the dummy emptyfilecontext
454 # [(idx, fctx)]. hide the dummy emptyfilecontext
455 visiblefctxs = [(i, f)
455 visiblefctxs = [(i, f)
456 for i, f in enumerate(self.fctxs)
456 for i, f in enumerate(self.fctxs)
457 if not isinstance(f, emptyfilecontext)]
457 if not isinstance(f, emptyfilecontext)]
458 for i, (j, f) in enumerate(visiblefctxs):
458 for i, (j, f) in enumerate(visiblefctxs):
459 editortext += (_('HG: %s/%s %s %s\n') %
459 editortext += (_('HG: %s/%s %s %s\n') %
460 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
460 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
461 node.short(f.node()),
461 node.short(f.node()),
462 f.description().split('\n',1)[0]))
462 f.description().split('\n',1)[0]))
463 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
463 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
464 # figure out the lifetime of a line, this is relatively inefficient,
464 # figure out the lifetime of a line, this is relatively inefficient,
465 # but probably fine
465 # but probably fine
466 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
466 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
467 for i, f in visiblefctxs:
467 for i, f in visiblefctxs:
468 self.linelog.annotate((i + 1) * 2)
468 self.linelog.annotate((i + 1) * 2)
469 for l in self.linelog.annotateresult:
469 for l in self.linelog.annotateresult:
470 lineset[l].add(i)
470 lineset[l].add(i)
471 # append lines
471 # append lines
472 for l in alllines:
472 for l in alllines:
473 editortext += (' %s : %s' %
473 editortext += (' %s : %s' %
474 (''.join([('y' if i in lineset[l] else ' ')
474 (''.join([('y' if i in lineset[l] else ' ')
475 for i, _f in visiblefctxs]),
475 for i, _f in visiblefctxs]),
476 self._getline(l)))
476 self._getline(l)))
477 # run editor
477 # run editor
478 editedtext = self.ui.edit(editortext, '', action='absorb')
478 editedtext = self.ui.edit(editortext, '', action='absorb')
479 if not editedtext:
479 if not editedtext:
480 raise error.Abort(_('empty editor text'))
480 raise error.Abort(_('empty editor text'))
481 # parse edited result
481 # parse edited result
482 contents = ['' for i in self.fctxs]
482 contents = ['' for i in self.fctxs]
483 leftpadpos = 4
483 leftpadpos = 4
484 colonpos = leftpadpos + len(visiblefctxs) + 1
484 colonpos = leftpadpos + len(visiblefctxs) + 1
485 for l in mdiff.splitnewlines(editedtext):
485 for l in mdiff.splitnewlines(editedtext):
486 if l.startswith('HG:'):
486 if l.startswith('HG:'):
487 continue
487 continue
488 if l[colonpos - 1:colonpos + 2] != ' : ':
488 if l[colonpos - 1:colonpos + 2] != ' : ':
489 raise error.Abort(_('malformed line: %s') % l)
489 raise error.Abort(_('malformed line: %s') % l)
490 linecontent = l[colonpos + 2:]
490 linecontent = l[colonpos + 2:]
491 for i, ch in enumerate(
491 for i, ch in enumerate(
492 pycompat.bytestr(l[leftpadpos:colonpos - 1])):
492 pycompat.bytestr(l[leftpadpos:colonpos - 1])):
493 if ch == 'y':
493 if ch == 'y':
494 contents[visiblefctxs[i][0]] += linecontent
494 contents[visiblefctxs[i][0]] += linecontent
495 # chunkstats is hard to calculate if anything changes, therefore
495 # chunkstats is hard to calculate if anything changes, therefore
496 # set them to just a simple value (1, 1).
496 # set them to just a simple value (1, 1).
497 if editedtext != editortext:
497 if editedtext != editortext:
498 self.chunkstats = [1, 1]
498 self.chunkstats = [1, 1]
499 return contents
499 return contents
500
500
501 def _getline(self, lineinfo):
501 def _getline(self, lineinfo):
502 """((rev, linenum)) -> str. convert rev+line number to line content"""
502 """((rev, linenum)) -> str. convert rev+line number to line content"""
503 rev, linenum = lineinfo
503 rev, linenum = lineinfo
504 if rev & 1: # odd: original line taken from fctxs
504 if rev & 1: # odd: original line taken from fctxs
505 return self.contentlines[rev // 2][linenum]
505 return self.contentlines[rev // 2][linenum]
506 else: # even: fixup line from targetfctx
506 else: # even: fixup line from targetfctx
507 return self.targetlines[linenum]
507 return self.targetlines[linenum]
508
508
509 def _iscontinuous(self, a1, a2, closedinterval=False):
509 def _iscontinuous(self, a1, a2, closedinterval=False):
510 """(a1, a2 : int) -> bool
510 """(a1, a2 : int) -> bool
511
511
512 check if these lines are continuous. i.e. no other insertions or
512 check if these lines are continuous. i.e. no other insertions or
513 deletions (from other revisions) among these lines.
513 deletions (from other revisions) among these lines.
514
514
515 closedinterval decides whether a2 should be included or not. i.e. is
515 closedinterval decides whether a2 should be included or not. i.e. is
516 it [a1, a2), or [a1, a2] ?
516 it [a1, a2), or [a1, a2] ?
517 """
517 """
518 if a1 >= a2:
518 if a1 >= a2:
519 return True
519 return True
520 llog = self.linelog
520 llog = self.linelog
521 offset1 = llog.getoffset(a1)
521 offset1 = llog.getoffset(a1)
522 offset2 = llog.getoffset(a2) + int(closedinterval)
522 offset2 = llog.getoffset(a2) + int(closedinterval)
523 linesinbetween = llog.getalllines(offset1, offset2)
523 linesinbetween = llog.getalllines(offset1, offset2)
524 return len(linesinbetween) == a2 - a1 + int(closedinterval)
524 return len(linesinbetween) == a2 - a1 + int(closedinterval)
525
525
526 def _optimizefixups(self, fixups):
526 def _optimizefixups(self, fixups):
527 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
527 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
528 merge adjacent fixups to make them less fragmented.
528 merge adjacent fixups to make them less fragmented.
529 """
529 """
530 result = []
530 result = []
531 pcurrentchunk = [[-1, -1, -1, -1, -1]]
531 pcurrentchunk = [[-1, -1, -1, -1, -1]]
532
532
533 def pushchunk():
533 def pushchunk():
534 if pcurrentchunk[0][0] != -1:
534 if pcurrentchunk[0][0] != -1:
535 result.append(tuple(pcurrentchunk[0]))
535 result.append(tuple(pcurrentchunk[0]))
536
536
537 for i, chunk in enumerate(fixups):
537 for i, chunk in enumerate(fixups):
538 rev, a1, a2, b1, b2 = chunk
538 rev, a1, a2, b1, b2 = chunk
539 lastrev = pcurrentchunk[0][0]
539 lastrev = pcurrentchunk[0][0]
540 lasta2 = pcurrentchunk[0][2]
540 lasta2 = pcurrentchunk[0][2]
541 lastb2 = pcurrentchunk[0][4]
541 lastb2 = pcurrentchunk[0][4]
542 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
542 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
543 self._iscontinuous(max(a1 - 1, 0), a1)):
543 self._iscontinuous(max(a1 - 1, 0), a1)):
544 # merge into currentchunk
544 # merge into currentchunk
545 pcurrentchunk[0][2] = a2
545 pcurrentchunk[0][2] = a2
546 pcurrentchunk[0][4] = b2
546 pcurrentchunk[0][4] = b2
547 else:
547 else:
548 pushchunk()
548 pushchunk()
549 pcurrentchunk[0] = list(chunk)
549 pcurrentchunk[0] = list(chunk)
550 pushchunk()
550 pushchunk()
551 return result
551 return result
552
552
553 def _showchanges(self, fm, alines, blines, chunk, fixups):
553 def _showchanges(self, fm, alines, blines, chunk, fixups):
554
554
555 def trim(line):
555 def trim(line):
556 if line.endswith('\n'):
556 if line.endswith('\n'):
557 line = line[:-1]
557 line = line[:-1]
558 return line
558 return line
559
559
560 # this is not optimized for perf but _showchanges only gets executed
560 # this is not optimized for perf but _showchanges only gets executed
561 # with an extra command-line flag.
561 # with an extra command-line flag.
562 a1, a2, b1, b2 = chunk
562 a1, a2, b1, b2 = chunk
563 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
563 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
564 for idx, fa1, fa2, fb1, fb2 in fixups:
564 for idx, fa1, fa2, fb1, fb2 in fixups:
565 for i in pycompat.xrange(fa1, fa2):
565 for i in pycompat.xrange(fa1, fa2):
566 aidxs[i - a1] = (max(idx, 1) - 1) // 2
566 aidxs[i - a1] = (max(idx, 1) - 1) // 2
567 for i in pycompat.xrange(fb1, fb2):
567 for i in pycompat.xrange(fb1, fb2):
568 bidxs[i - b1] = (max(idx, 1) - 1) // 2
568 bidxs[i - b1] = (max(idx, 1) - 1) // 2
569
569
570 fm.startitem()
570 fm.startitem()
571 fm.write('hunk', ' %s\n',
571 fm.write('hunk', ' %s\n',
572 '@@ -%d,%d +%d,%d @@'
572 '@@ -%d,%d +%d,%d @@'
573 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
573 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
574 fm.data(path=self.path, linetype='hunk')
574 fm.data(path=self.path, linetype='hunk')
575
575
576 def writeline(idx, diffchar, line, linetype, linelabel):
576 def writeline(idx, diffchar, line, linetype, linelabel):
577 fm.startitem()
577 fm.startitem()
578 node = ''
578 node = ''
579 if idx:
579 if idx:
580 ctx = self.fctxs[idx]
580 ctx = self.fctxs[idx]
581 fm.context(fctx=ctx)
581 fm.context(fctx=ctx)
582 node = ctx.hex()
582 node = ctx.hex()
583 self.ctxaffected.add(ctx.changectx())
583 self.ctxaffected.add(ctx.changectx())
584 fm.write('node', '%-7.7s ', node, label='absorb.node')
584 fm.write('node', '%-7.7s ', node, label='absorb.node')
585 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
585 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
586 label=linelabel)
586 label=linelabel)
587 fm.data(path=self.path, linetype=linetype)
587 fm.data(path=self.path, linetype=linetype)
588
588
589 for i in pycompat.xrange(a1, a2):
589 for i in pycompat.xrange(a1, a2):
590 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
590 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
591 'diff.deleted')
591 'diff.deleted')
592 for i in pycompat.xrange(b1, b2):
592 for i in pycompat.xrange(b1, b2):
593 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
593 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
594 'diff.inserted')
594 'diff.inserted')
595
595
596 class fixupstate(object):
596 class fixupstate(object):
597 """state needed to run absorb
597 """state needed to run absorb
598
598
599 internally, it keeps paths and filefixupstates.
599 internally, it keeps paths and filefixupstates.
600
600
601 a typical use is like filefixupstate's:
601 a typical use is like filefixupstate's:
602
602
603 1. call diffwith, to calculate fixups
603 1. call diffwith, to calculate fixups
604 2. (optionally), present fixups to the user, or edit fixups
604 2. (optionally), present fixups to the user, or edit fixups
605 3. call apply, to apply changes to memory
605 3. call apply, to apply changes to memory
606 4. call commit, to commit changes to hg database
606 4. call commit, to commit changes to hg database
607 """
607 """
608
608
609 def __init__(self, stack, ui=None, opts=None):
609 def __init__(self, stack, ui=None, opts=None):
610 """([ctx], ui or None) -> None
610 """([ctx], ui or None) -> None
611
611
612 stack: should be linear, and sorted by topo order - oldest first.
612 stack: should be linear, and sorted by topo order - oldest first.
613 all commits in stack are considered mutable.
613 all commits in stack are considered mutable.
614 """
614 """
615 assert stack
615 assert stack
616 self.ui = ui or nullui()
616 self.ui = ui or nullui()
617 self.opts = opts or {}
617 self.opts = opts or {}
618 self.stack = stack
618 self.stack = stack
619 self.repo = stack[-1].repo().unfiltered()
619 self.repo = stack[-1].repo().unfiltered()
620
620
621 # following fields will be filled later
621 # following fields will be filled later
622 self.paths = [] # [str]
622 self.paths = [] # [str]
623 self.status = None # ctx.status output
623 self.status = None # ctx.status output
624 self.fctxmap = {} # {path: {ctx: fctx}}
624 self.fctxmap = {} # {path: {ctx: fctx}}
625 self.fixupmap = {} # {path: filefixupstate}
625 self.fixupmap = {} # {path: filefixupstate}
626 self.replacemap = {} # {oldnode: newnode or None}
626 self.replacemap = {} # {oldnode: newnode or None}
627 self.finalnode = None # head after all fixups
627 self.finalnode = None # head after all fixups
628 self.ctxaffected = set() # ctx that will be absorbed into
628 self.ctxaffected = set() # ctx that will be absorbed into
629
629
630 def diffwith(self, targetctx, match=None, fm=None):
630 def diffwith(self, targetctx, match=None, fm=None):
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
632 # only care about modified files
632 # only care about modified files
633 self.status = self.stack[-1].status(targetctx, match)
633 self.status = self.stack[-1].status(targetctx, match)
634 self.paths = []
634 self.paths = []
635 # but if --edit-lines is used, the user may want to edit files
635 # but if --edit-lines is used, the user may want to edit files
636 # even if they are not modified
636 # even if they are not modified
637 editopt = self.opts.get('edit_lines')
637 editopt = self.opts.get('edit_lines')
638 if not self.status.modified and editopt and match:
638 if not self.status.modified and editopt and match:
639 interestingpaths = match.files()
639 interestingpaths = match.files()
640 else:
640 else:
641 interestingpaths = self.status.modified
641 interestingpaths = self.status.modified
642 # prepare the filefixupstate
642 # prepare the filefixupstate
643 seenfctxs = set()
643 seenfctxs = set()
644 # sorting is necessary to eliminate ambiguity for the "double move"
644 # sorting is necessary to eliminate ambiguity for the "double move"
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
646 for path in sorted(interestingpaths):
646 for path in sorted(interestingpaths):
647 self.ui.debug('calculating fixups for %s\n' % path)
647 self.ui.debug('calculating fixups for %s\n' % path)
648 targetfctx = targetctx[path]
648 targetfctx = targetctx[path]
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
650 # ignore symbolic links or binary, or unchanged files
650 # ignore symbolic links or binary, or unchanged files
651 if any(f.islink() or stringutil.binary(f.data())
651 if any(f.islink() or stringutil.binary(f.data())
652 for f in [targetfctx] + fctxs
652 for f in [targetfctx] + fctxs
653 if not isinstance(f, emptyfilecontext)):
653 if not isinstance(f, emptyfilecontext)):
654 continue
654 continue
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
656 continue
656 continue
657 seenfctxs.update(fctxs[1:])
657 seenfctxs.update(fctxs[1:])
658 self.fctxmap[path] = ctx2fctx
658 self.fctxmap[path] = ctx2fctx
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
660 if fm is not None:
660 if fm is not None:
661 fm.startitem()
661 fm.startitem()
662 fm.plain('showing changes for ')
662 fm.plain('showing changes for ')
663 fm.write('path', '%s\n', path, label='absorb.path')
663 fm.write('path', '%s\n', path, label='absorb.path')
664 fm.data(linetype='path')
664 fm.data(linetype='path')
665 fstate.diffwith(targetfctx, fm)
665 fstate.diffwith(targetfctx, fm)
666 self.fixupmap[path] = fstate
666 self.fixupmap[path] = fstate
667 self.paths.append(path)
667 self.paths.append(path)
668 self.ctxaffected.update(fstate.ctxaffected)
668 self.ctxaffected.update(fstate.ctxaffected)
669
669
670 def apply(self):
670 def apply(self):
671 """apply fixups to individual filefixupstates"""
671 """apply fixups to individual filefixupstates"""
672 for path, state in self.fixupmap.iteritems():
672 for path, state in self.fixupmap.iteritems():
673 if self.ui.debugflag:
673 if self.ui.debugflag:
674 self.ui.write(_('applying fixups to %s\n') % path)
674 self.ui.write(_('applying fixups to %s\n') % path)
675 state.apply()
675 state.apply()
676
676
677 @property
677 @property
678 def chunkstats(self):
678 def chunkstats(self):
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
680 return dict((path, state.chunkstats)
680 return dict((path, state.chunkstats)
681 for path, state in self.fixupmap.iteritems())
681 for path, state in self.fixupmap.iteritems())
682
682
683 def commit(self):
683 def commit(self):
684 """commit changes. update self.finalnode, self.replacemap"""
684 """commit changes. update self.finalnode, self.replacemap"""
685 with self.repo.wlock(), self.repo.lock():
685 with self.repo.wlock(), self.repo.lock():
686 with self.repo.transaction('absorb') as tr:
686 with self.repo.transaction('absorb') as tr:
687 self._commitstack()
687 self._commitstack()
688 self._movebookmarks(tr)
688 self._movebookmarks(tr)
689 if self.repo['.'].node() in self.replacemap:
689 if self.repo['.'].node() in self.replacemap:
690 self._moveworkingdirectoryparent()
690 self._moveworkingdirectoryparent()
691 if self._useobsolete:
692 self._cleanupoldcommits()
693 if not self._useobsolete: # strip must be outside transactions
694 self._cleanupoldcommits()
691 self._cleanupoldcommits()
695 return self.finalnode
692 return self.finalnode
696
693
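# Note on the change above (rationale inferred from the commit message, not
# stated in the diff itself): scmutil.cleanupnodes() is expected to handle the
# no-obsmarker case with a transaction-safe (delayed) strip, so the old
# out-of-transaction call guarded by "if not self._useobsolete" could be
# dropped and the cleanup now always happens inside the 'absorb' transaction.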
697 def printchunkstats(self):
694 def printchunkstats(self):
698 """print things like '1 of 2 chunk(s) applied'"""
695 """print things like '1 of 2 chunk(s) applied'"""
699 ui = self.ui
696 ui = self.ui
700 chunkstats = self.chunkstats
697 chunkstats = self.chunkstats
701 if ui.verbose:
698 if ui.verbose:
702 # chunkstats for each file
699 # chunkstats for each file
703 for path, stat in chunkstats.iteritems():
700 for path, stat in chunkstats.iteritems():
704 if stat[0]:
701 if stat[0]:
705 ui.write(_('%s: %d of %d chunk(s) applied\n')
702 ui.write(_('%s: %d of %d chunk(s) applied\n')
706 % (path, stat[0], stat[1]))
703 % (path, stat[0], stat[1]))
707 elif not ui.quiet:
704 elif not ui.quiet:
708 # a summary for all files
705 # a summary for all files
709 stats = chunkstats.values()
706 stats = chunkstats.values()
710 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
707 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
711 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
708 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
712
709
713 def _commitstack(self):
710 def _commitstack(self):
714 """make new commits. update self.finalnode, self.replacemap.
711 """make new commits. update self.finalnode, self.replacemap.
715 it is split from "commit" to avoid too much indentation.
712 it is split from "commit" to avoid too much indentation.
716 """
713 """
717 # last node (20-char) committed by us
714 # last node (20-char) committed by us
718 lastcommitted = None
715 lastcommitted = None
719 # p1 which overrides the parent of the next commit, "None" means use
716 # p1 which overrides the parent of the next commit, "None" means use
720 # the original parent unchanged
717 # the original parent unchanged
721 nextp1 = None
718 nextp1 = None
722 for ctx in self.stack:
719 for ctx in self.stack:
723 memworkingcopy = self._getnewfilecontents(ctx)
720 memworkingcopy = self._getnewfilecontents(ctx)
724 if not memworkingcopy and not lastcommitted:
721 if not memworkingcopy and not lastcommitted:
725 # nothing changed, nothing committed
722 # nothing changed, nothing committed
726 nextp1 = ctx
723 nextp1 = ctx
727 continue
724 continue
728 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
725 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
729 # changeset is no longer necessary
726 # changeset is no longer necessary
730 self.replacemap[ctx.node()] = None
727 self.replacemap[ctx.node()] = None
731 msg = _('became empty and was dropped')
728 msg = _('became empty and was dropped')
732 else:
729 else:
733 # changeset needs re-commit
730 # changeset needs re-commit
734 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
731 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
735 lastcommitted = self.repo[nodestr]
732 lastcommitted = self.repo[nodestr]
736 nextp1 = lastcommitted
733 nextp1 = lastcommitted
737 self.replacemap[ctx.node()] = lastcommitted.node()
734 self.replacemap[ctx.node()] = lastcommitted.node()
738 if memworkingcopy:
735 if memworkingcopy:
739 msg = _('%d file(s) changed, became %s') % (
736 msg = _('%d file(s) changed, became %s') % (
740 len(memworkingcopy), self._ctx2str(lastcommitted))
737 len(memworkingcopy), self._ctx2str(lastcommitted))
741 else:
738 else:
742 msg = _('became %s') % self._ctx2str(lastcommitted)
739 msg = _('became %s') % self._ctx2str(lastcommitted)
743 if self.ui.verbose and msg:
740 if self.ui.verbose and msg:
744 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
741 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
745 self.finalnode = lastcommitted and lastcommitted.node()
742 self.finalnode = lastcommitted and lastcommitted.node()
746
743
747 def _ctx2str(self, ctx):
744 def _ctx2str(self, ctx):
748 if self.ui.debugflag:
745 if self.ui.debugflag:
749 return '%d:%s' % (ctx.rev(), ctx.hex())
746 return '%d:%s' % (ctx.rev(), ctx.hex())
750 else:
747 else:
751 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
748 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
752
749
753 def _getnewfilecontents(self, ctx):
750 def _getnewfilecontents(self, ctx):
754 """(ctx) -> {path: str}
751 """(ctx) -> {path: str}
755
752
756 fetch file contents from filefixupstates.
753 fetch file contents from filefixupstates.
757 return the working copy overrides - files different from ctx.
754 return the working copy overrides - files different from ctx.
758 """
755 """
759 result = {}
756 result = {}
760 for path in self.paths:
757 for path in self.paths:
761 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
758 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
762 if ctx not in ctx2fctx:
759 if ctx not in ctx2fctx:
763 continue
760 continue
764 fctx = ctx2fctx[ctx]
761 fctx = ctx2fctx[ctx]
765 content = fctx.data()
762 content = fctx.data()
766 newcontent = self.fixupmap[path].getfinalcontent(fctx)
763 newcontent = self.fixupmap[path].getfinalcontent(fctx)
767 if content != newcontent:
764 if content != newcontent:
768 result[fctx.path()] = newcontent
765 result[fctx.path()] = newcontent
769 return result
766 return result
770
767
771 def _movebookmarks(self, tr):
768 def _movebookmarks(self, tr):
772 repo = self.repo
769 repo = self.repo
773 needupdate = [(name, self.replacemap[hsh])
770 needupdate = [(name, self.replacemap[hsh])
774 for name, hsh in repo._bookmarks.iteritems()
771 for name, hsh in repo._bookmarks.iteritems()
775 if hsh in self.replacemap]
772 if hsh in self.replacemap]
776 changes = []
773 changes = []
777 for name, hsh in needupdate:
774 for name, hsh in needupdate:
            if hsh:
                changes.append((name, hsh))
                if self.ui.verbose:
                    self.ui.write(_('moving bookmark %s to %s\n')
                                  % (name, node.hex(hsh)))
            else:
                changes.append((name, None))
                if self.ui.verbose:
                    self.ui.write(_('deleting bookmark %s\n') % name)
        repo._bookmarks.applychanges(repo, tr, changes)

    def _moveworkingdirectoryparent(self):
        if not self.finalnode:
            # Find the latest not-{obsoleted,stripped} parent.
            revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
            ctx = self.repo[revs.first()]
            self.finalnode = ctx.node()
        else:
            ctx = self.repo[self.finalnode]

        dirstate = self.repo.dirstate
        # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
        # be slow. in absorb's case, no need to invalidate fsmonitorstate.
        noop = lambda: 0
        restore = noop
        if util.safehasattr(dirstate, '_fsmonitorstate'):
            bak = dirstate._fsmonitorstate.invalidate
            def restore():
                dirstate._fsmonitorstate.invalidate = bak
            dirstate._fsmonitorstate.invalidate = noop
        try:
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
        finally:
            restore()

    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy)):
            return False
        for path, content in memworkingcopy.iteritems():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True

    def _commitsingle(self, memworkingcopy, ctx, p1=None):
841 """(ctx, {path: content}, node) -> node. make a single commit
838 """(ctx, {path: content}, node) -> node. make a single commit
842
839
843 the commit is a clone from ctx, with a (optionally) different p1, and
840 the commit is a clone from ctx, with a (optionally) different p1, and
844 different file contents replaced by memworkingcopy.
841 different file contents replaced by memworkingcopy.
845 """
842 """
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        return mctx.commit()

    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _cleanupoldcommits(self):
        replacements = {k: ([v] if v is not None else [])
                        for k, v in self.replacemap.iteritems()}
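        # For example (hypothetical nodes), {old1: [new1], old2: []}: a
        # rewritten changeset maps to its single successor, while a changeset
        # that became empty maps to an empty list and is simply pruned.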
        if replacements:
            scmutil.cleanupnodes(self.repo, replacements, operation='absorb',
                                 fixphase=True)

def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)
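
# Illustrative sketch (not part of absorb itself): the filtering at the end of
# _parsechunk keeps every line of the written-out hunk except the "@@" header
# and the removed ("-") lines, then strips the one-character diff prefix,
# which yields the "b" (new) side of the hunk. A self-contained demonstration
# on a made-up hunk body:
def _demoblines(hunktext):
    """extract the b-side lines from a unified-diff hunk body (demo only)"""
    patchlines = hunktext.splitlines(True)
    return [l[1:] for l in patchlines[1:] if l[0] != '-']

# _demoblines('@@ -1,2 +1,2 @@\n-old\n+new\n context\n')
# returns ['new\n', 'context\n']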

def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    patchmap = collections.defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)
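
# Illustrative sketch (not part of absorb itself): why the patches above are
# applied in descending order of offset. Splicing a list only shifts the lines
# after the edited range, so handling the highest (a1, a2) span first keeps
# the remaining, lower offsets valid. A self-contained demonstration on plain
# lists:
def _demoapplypatches(lines, patches):
    """apply [(a1, a2, [bline])] to a list of lines, highest offset first"""
    for a1, a2, blines in sorted(patches, reverse=True):
        lines[a1:a2] = blines
    return lines

# _demoapplypatches(['a\n', 'b\n', 'c\n', 'd\n'],
#                   [(0, 1, ['A\n']), (2, 4, ['C\n'])])
# returns ['A\n', 'b\n', 'C\n']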

def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'max-stack-size')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no mutable changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    fm = None
    if opts.get('print_changes') or not opts.get('apply_changes'):
        fm = ui.formatter('absorb', opts)
    state.diffwith(targetctx, matcher, fm)
    if fm is not None:
        fm.startitem()
        fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
        fm.data(linetype='summary')
        for ctx in reversed(stack):
            if ctx not in state.ctxaffected:
                continue
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(linetype='changeset')
            fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
            descfirstline = ctx.description().splitlines()[0]
            fm.write('descfirstline', '%s\n', descfirstline,
                     label='absorb.description')
        fm.end()
    if not opts.get('dry_run'):
        if (not opts.get('apply_changes') and
            state.ctxaffected and
            ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
            raise error.Abort(_('absorb cancelled\n'))

        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state
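
# Illustrative sketch (hypothetical helper, not part of this extension): the
# absorb() function above can also be driven programmatically. The option keys
# mirror the ones absorb() itself reads ('apply_changes', 'dry_run', ...):
def _demoabsorbquietly(ui, repo):
    """run absorb non-interactively, accepting every unambiguous chunk"""
    state = absorb(ui, repo, opts={'apply_changes': True})
    return 0 if sum(s[0] for s in state.chunkstats.values()) else 1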

@command('absorb',
         [('a', 'apply-changes', None,
           _('apply changes without prompting for confirmation')),
          ('p', 'print-changes', None,
           _('always print which changesets are modified by which changes')),
          ('i', 'interactive', None,
           _('interactively select which chunks to apply (EXPERIMENTAL)')),
          ('e', 'edit-lines', None,
           _('edit what lines belong to which changesets before commit '
             '(EXPERIMENTAL)')),
         ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
         _('hg absorb [OPTION] [FILE]...'),
         helpcategory=command.CATEGORY_COMMITTING,
         helpbasic=True)
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. It can be
    observed by :hg:`status` or :hg:`diff` afterwards. In other words,
    absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    By default, absorb will show what it plans to do and prompt for
    confirmation. If you are confident that the changes will be absorbed
    to the correct place, run :hg:`absorb -a` to apply the changes
    immediately.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    opts = pycompat.byteskwargs(opts)
    state = absorb(ui, repo, pats=pats, opts=opts)
    if sum(s[0] for s in state.chunkstats.values()) == 0:
        return 1
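
# Example invocations (illustrative; the short flags are the ones registered
# on the command above, plus -n/--dry-run from commands.dryrunopts):
#
#   hg absorb        # show the plan and prompt before amending
#   hg absorb -a     # apply the absorbed changes without prompting
#   hg absorb -p     # always print which changesets absorb which changes
#   hg absorb -n     # dry run: report what would happen, change nothing
#   hg absorb -i     # interactively pick the chunks to absorb (EXPERIMENTAL)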