absorb: aborting if another operation is in progress...
Rodrigo Damazio Bovendorp
r42297:537a8aeb default
@@ -0,0 +1,30 @@
1 $ cat >> $HGRCPATH << EOF
2 > [extensions]
3 > absorb=
4 > EOF
5
6 Abort absorb if there is an unfinished operation.
7
8 $ hg init abortunresolved
9 $ cd abortunresolved
10
11 $ echo "foo1" > foo.whole
12 $ hg commit -Aqm "foo 1"
13
14 $ hg update null
15 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
16 $ echo "foo2" > foo.whole
17 $ hg commit -Aqm "foo 2"
18
19 $ hg --config extensions.rebase= rebase -r 1 -d 0
20 rebasing 1:c3b6dc0e177a "foo 2" (tip)
21 merging foo.whole
22 warning: conflicts while merging foo.whole! (edit, then use 'hg resolve --mark')
23 unresolved conflicts (see hg resolve, then hg rebase --continue)
24 [1]
25
26 $ hg --config extensions.rebase= absorb
27 abort: rebase in progress
28 (use 'hg rebase --continue' or 'hg rebase --abort')
29 [255]
30
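The test above deliberately interrupts a rebase and then expects absorb to refuse to run. The hunk that wires this guard into the absorb command is not shown here, but the conventional Mercurial mechanism for such a check is cmdutil.checkunfinished(), which aborts when another command has left unfinished state behind. A minimal, hedged sketch (the command name and body are illustrative, not taken from this changeset):

    from mercurial import cmdutil, registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('guardedcmd', [], 'hg guardedcmd')
    def guardedcmd(ui, repo):
        # abort early if rebase/graft/histedit/... left unfinished state behind
        cmdutil.checkunfinished(repo)
        ui.write('no unfinished operation, safe to rewrite history\n')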
@@ -1,1011 +1,1015 @@
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 max-stack-size = 50
17 max-stack-size = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 add-noise = 1
19 add-noise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amend-flag = correlated
21 amend-flag = correlated
22
22
23 [color]
23 [color]
24 absorb.description = yellow
24 absorb.description = yellow
25 absorb.node = blue bold
25 absorb.node = blue bold
26 absorb.path = bold
26 absorb.path = bold
27 """
27 """
28
28
29 # TODO:
29 # TODO:
30 # * Rename config items to [commands] namespace
30 # * Rename config items to [commands] namespace
31 # * Converge getdraftstack() with other code in core
31 # * Converge getdraftstack() with other code in core
32 # * move many attributes on fixupstate to be private
32 # * move many attributes on fixupstate to be private
33
33
34 from __future__ import absolute_import
34 from __future__ import absolute_import
35
35
36 import collections
36 import collections
37
37
38 from mercurial.i18n import _
38 from mercurial.i18n import _
39 from mercurial import (
39 from mercurial import (
40 cmdutil,
40 cmdutil,
41 commands,
41 commands,
42 context,
42 context,
43 crecord,
43 crecord,
44 error,
44 error,
45 linelog,
45 linelog,
46 mdiff,
46 mdiff,
47 node,
47 node,
48 obsolete,
48 obsolete,
49 patch,
49 patch,
50 phases,
50 phases,
51 pycompat,
51 pycompat,
52 registrar,
52 registrar,
53 scmutil,
53 scmutil,
54 util,
54 util,
55 )
55 )
56 from mercurial.utils import (
56 from mercurial.utils import (
57 stringutil,
57 stringutil,
58 )
58 )
59
59
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # be specifying the version(s) of Mercurial they are tested with, or
62 # be specifying the version(s) of Mercurial they are tested with, or
63 # leave the attribute unspecified.
63 # leave the attribute unspecified.
64 testedwith = 'ships-with-hg-core'
64 testedwith = 'ships-with-hg-core'
65
65
66 cmdtable = {}
66 cmdtable = {}
67 command = registrar.command(cmdtable)
67 command = registrar.command(cmdtable)
68
68
69 configtable = {}
69 configtable = {}
70 configitem = registrar.configitem(configtable)
70 configitem = registrar.configitem(configtable)
71
71
72 configitem('absorb', 'add-noise', default=True)
72 configitem('absorb', 'add-noise', default=True)
73 configitem('absorb', 'amend-flag', default=None)
73 configitem('absorb', 'amend-flag', default=None)
74 configitem('absorb', 'max-stack-size', default=50)
74 configitem('absorb', 'max-stack-size', default=50)
75
75
76 colortable = {
76 colortable = {
77 'absorb.description': 'yellow',
77 'absorb.description': 'yellow',
78 'absorb.node': 'blue bold',
78 'absorb.node': 'blue bold',
79 'absorb.path': 'bold',
79 'absorb.path': 'bold',
80 }
80 }
81
81
82 defaultdict = collections.defaultdict
82 defaultdict = collections.defaultdict
83
83
84 class nullui(object):
84 class nullui(object):
85 """blank ui object doing nothing"""
85 """blank ui object doing nothing"""
86 debugflag = False
86 debugflag = False
87 verbose = False
87 verbose = False
88 quiet = True
88 quiet = True
89
89
90 def __getitem__(name):
90 def __getitem__(name):
91 def nullfunc(*args, **kwds):
91 def nullfunc(*args, **kwds):
92 return
92 return
93 return nullfunc
93 return nullfunc
94
94
95 class emptyfilecontext(object):
95 class emptyfilecontext(object):
96 """minimal filecontext representing an empty file"""
96 """minimal filecontext representing an empty file"""
97 def data(self):
97 def data(self):
98 return ''
98 return ''
99
99
100 def node(self):
100 def node(self):
101 return node.nullid
101 return node.nullid
102
102
103 def uniq(lst):
103 def uniq(lst):
104 """list -> list. remove duplicated items without changing the order"""
104 """list -> list. remove duplicated items without changing the order"""
105 seen = set()
105 seen = set()
106 result = []
106 result = []
107 for x in lst:
107 for x in lst:
108 if x not in seen:
108 if x not in seen:
109 seen.add(x)
109 seen.add(x)
110 result.append(x)
110 result.append(x)
111 return result
111 return result
112
112
113 def getdraftstack(headctx, limit=None):
113 def getdraftstack(headctx, limit=None):
114 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
114 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
115
115
116 changesets are sorted in topo order, oldest first.
116 changesets are sorted in topo order, oldest first.
117 return at most limit items, if limit is a positive number.
117 return at most limit items, if limit is a positive number.
118
118
119 merges are considered as non-draft as well. i.e. every commit
119 merges are considered as non-draft as well. i.e. every commit
120 returned has and only has 1 parent.
120 returned has and only has 1 parent.
121 """
121 """
122 ctx = headctx
122 ctx = headctx
123 result = []
123 result = []
124 while ctx.phase() != phases.public:
124 while ctx.phase() != phases.public:
125 if limit and len(result) >= limit:
125 if limit and len(result) >= limit:
126 break
126 break
127 parents = ctx.parents()
127 parents = ctx.parents()
128 if len(parents) != 1:
128 if len(parents) != 1:
129 break
129 break
130 result.append(ctx)
130 result.append(ctx)
131 ctx = parents[0]
131 ctx = parents[0]
132 result.reverse()
132 result.reverse()
133 return result
133 return result
134
134
135 def getfilestack(stack, path, seenfctxs=None):
135 def getfilestack(stack, path, seenfctxs=None):
136 """([ctx], str, set) -> [fctx], {ctx: fctx}
136 """([ctx], str, set) -> [fctx], {ctx: fctx}
137
137
138 stack is a list of contexts, from old to new. usually they are what
138 stack is a list of contexts, from old to new. usually they are what
139 "getdraftstack" returns.
139 "getdraftstack" returns.
140
140
141 follows renames, but not copies.
141 follows renames, but not copies.
142
142
143 seenfctxs is a set of filecontexts that will be considered "immutable".
143 seenfctxs is a set of filecontexts that will be considered "immutable".
144 they are usually what this function returned in earlier calls, useful
144 they are usually what this function returned in earlier calls, useful
145 to avoid issues that a file was "moved" to multiple places and was then
145 to avoid issues that a file was "moved" to multiple places and was then
146 modified differently, like: "a" was copied to "b", "a" was also copied to
146 modified differently, like: "a" was copied to "b", "a" was also copied to
147 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
147 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
148 and we enforce only one of them to be able to affect "a"'s content.
148 and we enforce only one of them to be able to affect "a"'s content.
149
149
150 return an empty list and an empty dict, if the specified path does not
150 return an empty list and an empty dict, if the specified path does not
151 exist in stack[-1] (the top of the stack).
151 exist in stack[-1] (the top of the stack).
152
152
153 otherwise, return a list of de-duplicated filecontexts, and the map to
153 otherwise, return a list of de-duplicated filecontexts, and the map to
154 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
154 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
155 of the list would be outside the stack and should be considered immutable.
155 of the list would be outside the stack and should be considered immutable.
156 the remaining items are within the stack.
156 the remaining items are within the stack.
157
157
158 for example, given the following changelog and corresponding filelog
158 for example, given the following changelog and corresponding filelog
159 revisions:
159 revisions:
160
160
161 changelog: 3----4----5----6----7
161 changelog: 3----4----5----6----7
162 filelog: x 0----1----1----2 (x: no such file yet)
162 filelog: x 0----1----1----2 (x: no such file yet)
163
163
164 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
164 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
165 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
165 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
166 dummy empty filecontext.
166 dummy empty filecontext.
167 - if stack = [2], returns ([], {})
167 - if stack = [2], returns ([], {})
168 - if stack = [7], returns ([1, 2], {7: 2})
168 - if stack = [7], returns ([1, 2], {7: 2})
169 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
169 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
170 removed, since 1 is immutable.
170 removed, since 1 is immutable.
171 """
171 """
172 if seenfctxs is None:
172 if seenfctxs is None:
173 seenfctxs = set()
173 seenfctxs = set()
174 assert stack
174 assert stack
175
175
176 if path not in stack[-1]:
176 if path not in stack[-1]:
177 return [], {}
177 return [], {}
178
178
179 fctxs = []
179 fctxs = []
180 fctxmap = {}
180 fctxmap = {}
181
181
182 pctx = stack[0].p1() # the public (immutable) ctx we stop at
182 pctx = stack[0].p1() # the public (immutable) ctx we stop at
183 for ctx in reversed(stack):
183 for ctx in reversed(stack):
184 if path not in ctx: # the file is added in the next commit
184 if path not in ctx: # the file is added in the next commit
185 pctx = ctx
185 pctx = ctx
186 break
186 break
187 fctx = ctx[path]
187 fctx = ctx[path]
188 fctxs.append(fctx)
188 fctxs.append(fctx)
189 if fctx in seenfctxs: # treat fctx as the immutable one
189 if fctx in seenfctxs: # treat fctx as the immutable one
190 pctx = None # do not add another immutable fctx
190 pctx = None # do not add another immutable fctx
191 break
191 break
192 fctxmap[ctx] = fctx # only for mutable fctxs
192 fctxmap[ctx] = fctx # only for mutable fctxs
193 copy = fctx.copysource()
193 copy = fctx.copysource()
194 if copy:
194 if copy:
195 path = copy # follow rename
195 path = copy # follow rename
196 if path in ctx: # but do not follow copy
196 if path in ctx: # but do not follow copy
197 pctx = ctx.p1()
197 pctx = ctx.p1()
198 break
198 break
199
199
200 if pctx is not None: # need an extra immutable fctx
200 if pctx is not None: # need an extra immutable fctx
201 if path in pctx:
201 if path in pctx:
202 fctxs.append(pctx[path])
202 fctxs.append(pctx[path])
203 else:
203 else:
204 fctxs.append(emptyfilecontext())
204 fctxs.append(emptyfilecontext())
205
205
206 fctxs.reverse()
206 fctxs.reverse()
207 # note: we rely on a property of hg: filerev is not reused for linear
207 # note: we rely on a property of hg: filerev is not reused for linear
208 # history. i.e. it's impossible to have:
208 # history. i.e. it's impossible to have:
209 # changelog: 4----5----6 (linear, no merges)
209 # changelog: 4----5----6 (linear, no merges)
210 # filelog: 1----2----1
210 # filelog: 1----2----1
211 # ^ reuse filerev (impossible)
211 # ^ reuse filerev (impossible)
212 # because parents are part of the hash. if that's not true, we need to
212 # because parents are part of the hash. if that's not true, we need to
213 # remove uniq and find a different way to identify fctxs.
213 # remove uniq and find a different way to identify fctxs.
214 return uniq(fctxs), fctxmap
214 return uniq(fctxs), fctxmap
215
215
216 class overlaystore(patch.filestore):
216 class overlaystore(patch.filestore):
217 """read-only, hybrid store based on a dict and ctx.
217 """read-only, hybrid store based on a dict and ctx.
218 memworkingcopy: {path: content}, overrides file contents.
218 memworkingcopy: {path: content}, overrides file contents.
219 """
219 """
220 def __init__(self, basectx, memworkingcopy):
220 def __init__(self, basectx, memworkingcopy):
221 self.basectx = basectx
221 self.basectx = basectx
222 self.memworkingcopy = memworkingcopy
222 self.memworkingcopy = memworkingcopy
223
223
224 def getfile(self, path):
224 def getfile(self, path):
225 """comply with mercurial.patch.filestore.getfile"""
225 """comply with mercurial.patch.filestore.getfile"""
226 if path not in self.basectx:
226 if path not in self.basectx:
227 return None, None, None
227 return None, None, None
228 fctx = self.basectx[path]
228 fctx = self.basectx[path]
229 if path in self.memworkingcopy:
229 if path in self.memworkingcopy:
230 content = self.memworkingcopy[path]
230 content = self.memworkingcopy[path]
231 else:
231 else:
232 content = fctx.data()
232 content = fctx.data()
233 mode = (fctx.islink(), fctx.isexec())
233 mode = (fctx.islink(), fctx.isexec())
234 copy = fctx.copysource()
234 copy = fctx.copysource()
235 return content, mode, copy
235 return content, mode, copy
236
236
237 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
237 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
238 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
238 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
239 memworkingcopy overrides file contents.
239 memworkingcopy overrides file contents.
240 """
240 """
241 # parents must contain 2 items: (node1, node2)
241 # parents must contain 2 items: (node1, node2)
242 if parents is None:
242 if parents is None:
243 parents = ctx.repo().changelog.parents(ctx.node())
243 parents = ctx.repo().changelog.parents(ctx.node())
244 if extra is None:
244 if extra is None:
245 extra = ctx.extra()
245 extra = ctx.extra()
246 date = ctx.date()
246 date = ctx.date()
247 desc = ctx.description()
247 desc = ctx.description()
248 user = ctx.user()
248 user = ctx.user()
249 files = set(ctx.files()).union(memworkingcopy)
249 files = set(ctx.files()).union(memworkingcopy)
250 store = overlaystore(ctx, memworkingcopy)
250 store = overlaystore(ctx, memworkingcopy)
251 return context.memctx(
251 return context.memctx(
252 repo=ctx.repo(), parents=parents, text=desc,
252 repo=ctx.repo(), parents=parents, text=desc,
253 files=files, filectxfn=store, user=user, date=date,
253 files=files, filectxfn=store, user=user, date=date,
254 branch=None, extra=extra)
254 branch=None, extra=extra)
255
255
256 class filefixupstate(object):
256 class filefixupstate(object):
257 """state needed to apply fixups to a single file
257 """state needed to apply fixups to a single file
258
258
259 internally, it keeps file contents of several revisions and a linelog.
259 internally, it keeps file contents of several revisions and a linelog.
260
260
261 the linelog uses odd revision numbers for original contents (fctxs passed
261 the linelog uses odd revision numbers for original contents (fctxs passed
262 to __init__), and even revision numbers for fixups, like:
262 to __init__), and even revision numbers for fixups, like:
263
263
264 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
264 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
265 linelog rev 2: fixups made to self.fctxs[0]
265 linelog rev 2: fixups made to self.fctxs[0]
266 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
266 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
267 linelog rev 4: fixups made to self.fctxs[1]
267 linelog rev 4: fixups made to self.fctxs[1]
268 ...
268 ...
269
269
270 a typical use is like:
270 a typical use is like:
271
271
272 1. call diffwith, to calculate self.fixups
272 1. call diffwith, to calculate self.fixups
273 2. (optionally), present self.fixups to the user, or change it
273 2. (optionally), present self.fixups to the user, or change it
274 3. call apply, to apply changes
274 3. call apply, to apply changes
275 4. read results from "finalcontents", or call getfinalcontent
275 4. read results from "finalcontents", or call getfinalcontent
276 """
276 """
277
277
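    # Editor's sketch (not part of absorb.py): the four-step workflow described
    # in the docstring above, assuming fctxs, path, targetfctx and ui are
    # already prepared (e.g. fctxs and path via getfilestack).
    state = filefixupstate(fctxs, path, ui=ui)
    state.diffwith(targetfctx)                  # 1. compute state.fixups
    # 2. optionally inspect or trim state.fixups here
    state.apply()                               # 3. apply fixups to the linelog
    content = state.getfinalcontent(fctxs[-1])  # 4. rewritten data for the top fctx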
278 def __init__(self, fctxs, path, ui=None, opts=None):
278 def __init__(self, fctxs, path, ui=None, opts=None):
279 """([fctx], ui or None) -> None
279 """([fctx], ui or None) -> None
280
280
281 fctxs should be linear, and sorted by topo order - oldest first.
281 fctxs should be linear, and sorted by topo order - oldest first.
282 fctxs[0] will be considered as "immutable" and will not be changed.
282 fctxs[0] will be considered as "immutable" and will not be changed.
283 """
283 """
284 self.fctxs = fctxs
284 self.fctxs = fctxs
285 self.path = path
285 self.path = path
286 self.ui = ui or nullui()
286 self.ui = ui or nullui()
287 self.opts = opts or {}
287 self.opts = opts or {}
288
288
289 # following fields are built from fctxs. they exist for perf reason
289 # following fields are built from fctxs. they exist for perf reason
290 self.contents = [f.data() for f in fctxs]
290 self.contents = [f.data() for f in fctxs]
291 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
291 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
292 self.linelog = self._buildlinelog()
292 self.linelog = self._buildlinelog()
293 if self.ui.debugflag:
293 if self.ui.debugflag:
294 assert self._checkoutlinelog() == self.contents
294 assert self._checkoutlinelog() == self.contents
295
295
296 # following fields will be filled later
296 # following fields will be filled later
297 self.chunkstats = [0, 0] # [adopted, total : int]
297 self.chunkstats = [0, 0] # [adopted, total : int]
298 self.targetlines = [] # [str]
298 self.targetlines = [] # [str]
299 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
299 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
300 self.finalcontents = [] # [str]
300 self.finalcontents = [] # [str]
301 self.ctxaffected = set()
301 self.ctxaffected = set()
302
302
303 def diffwith(self, targetfctx, fm=None):
303 def diffwith(self, targetfctx, fm=None):
304 """calculate fixups needed by examining the differences between
304 """calculate fixups needed by examining the differences between
305 self.fctxs[-1] and targetfctx, chunk by chunk.
305 self.fctxs[-1] and targetfctx, chunk by chunk.
306
306
307 targetfctx is the target state we move towards. we may or may not be
307 targetfctx is the target state we move towards. we may or may not be
308 able to get there because not all modified chunks can be amended into
308 able to get there because not all modified chunks can be amended into
309 a non-public fctx unambiguously.
309 a non-public fctx unambiguously.
310
310
311 call this only once, before apply().
311 call this only once, before apply().
312
312
313 update self.fixups, self.chunkstats, and self.targetlines.
313 update self.fixups, self.chunkstats, and self.targetlines.
314 """
314 """
315 a = self.contents[-1]
315 a = self.contents[-1]
316 alines = self.contentlines[-1]
316 alines = self.contentlines[-1]
317 b = targetfctx.data()
317 b = targetfctx.data()
318 blines = mdiff.splitnewlines(b)
318 blines = mdiff.splitnewlines(b)
319 self.targetlines = blines
319 self.targetlines = blines
320
320
321 self.linelog.annotate(self.linelog.maxrev)
321 self.linelog.annotate(self.linelog.maxrev)
322 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
322 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
323 assert len(annotated) == len(alines)
323 assert len(annotated) == len(alines)
324 # add a dummy end line to make insertion at the end easier
324 # add a dummy end line to make insertion at the end easier
325 if annotated:
325 if annotated:
326 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
326 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
327 annotated.append(dummyendline)
327 annotated.append(dummyendline)
328
328
329 # analyse diff blocks
329 # analyse diff blocks
330 for chunk in self._alldiffchunks(a, b, alines, blines):
330 for chunk in self._alldiffchunks(a, b, alines, blines):
331 newfixups = self._analysediffchunk(chunk, annotated)
331 newfixups = self._analysediffchunk(chunk, annotated)
332 self.chunkstats[0] += bool(newfixups) # 1 or 0
332 self.chunkstats[0] += bool(newfixups) # 1 or 0
333 self.chunkstats[1] += 1
333 self.chunkstats[1] += 1
334 self.fixups += newfixups
334 self.fixups += newfixups
335 if fm is not None:
335 if fm is not None:
336 self._showchanges(fm, alines, blines, chunk, newfixups)
336 self._showchanges(fm, alines, blines, chunk, newfixups)
337
337
338 def apply(self):
338 def apply(self):
339 """apply self.fixups. update self.linelog, self.finalcontents.
339 """apply self.fixups. update self.linelog, self.finalcontents.
340
340
341 call this only once, before getfinalcontent(), after diffwith().
341 call this only once, before getfinalcontent(), after diffwith().
342 """
342 """
343 # the following is unnecessary, as it's done by "diffwith":
343 # the following is unnecessary, as it's done by "diffwith":
344 # self.linelog.annotate(self.linelog.maxrev)
344 # self.linelog.annotate(self.linelog.maxrev)
345 for rev, a1, a2, b1, b2 in reversed(self.fixups):
345 for rev, a1, a2, b1, b2 in reversed(self.fixups):
346 blines = self.targetlines[b1:b2]
346 blines = self.targetlines[b1:b2]
347 if self.ui.debugflag:
347 if self.ui.debugflag:
348 idx = (max(rev - 1, 0)) // 2
348 idx = (max(rev - 1, 0)) // 2
349 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
349 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
350 % (node.short(self.fctxs[idx].node()),
350 % (node.short(self.fctxs[idx].node()),
351 a1, a2, len(blines)))
351 a1, a2, len(blines)))
352 self.linelog.replacelines(rev, a1, a2, b1, b2)
352 self.linelog.replacelines(rev, a1, a2, b1, b2)
353 if self.opts.get('edit_lines', False):
353 if self.opts.get('edit_lines', False):
354 self.finalcontents = self._checkoutlinelogwithedits()
354 self.finalcontents = self._checkoutlinelogwithedits()
355 else:
355 else:
356 self.finalcontents = self._checkoutlinelog()
356 self.finalcontents = self._checkoutlinelog()
357
357
358 def getfinalcontent(self, fctx):
358 def getfinalcontent(self, fctx):
359 """(fctx) -> str. get modified file content for a given filecontext"""
359 """(fctx) -> str. get modified file content for a given filecontext"""
360 idx = self.fctxs.index(fctx)
360 idx = self.fctxs.index(fctx)
361 return self.finalcontents[idx]
361 return self.finalcontents[idx]
362
362
363 def _analysediffchunk(self, chunk, annotated):
363 def _analysediffchunk(self, chunk, annotated):
364 """analyse a different chunk and return new fixups found
364 """analyse a different chunk and return new fixups found
365
365
366 return [] if no lines from the chunk can be safely applied.
366 return [] if no lines from the chunk can be safely applied.
367
367
368 the chunk (or lines) cannot be safely applied, if, for example:
368 the chunk (or lines) cannot be safely applied, if, for example:
369 - the modified (deleted) lines belong to a public changeset
369 - the modified (deleted) lines belong to a public changeset
370 (self.fctxs[0])
370 (self.fctxs[0])
371 - the chunk is a pure insertion and the adjacent lines (at most 2
371 - the chunk is a pure insertion and the adjacent lines (at most 2
372 lines) belong to different non-public changesets, or do not belong
372 lines) belong to different non-public changesets, or do not belong
373 to any non-public changesets.
373 to any non-public changesets.
374 - the chunk is modifying lines from different changesets.
374 - the chunk is modifying lines from different changesets.
375 in this case, if the number of lines deleted equals to the number
375 in this case, if the number of lines deleted equals to the number
376 of lines added, assume it's a simple 1:1 map (could be wrong).
376 of lines added, assume it's a simple 1:1 map (could be wrong).
377 otherwise, give up.
377 otherwise, give up.
378 - the chunk is modifying lines from a single non-public changeset,
378 - the chunk is modifying lines from a single non-public changeset,
379 but other revisions touch the area as well. i.e. the lines are
379 but other revisions touch the area as well. i.e. the lines are
380 not continuous as seen from the linelog.
380 not continuous as seen from the linelog.
381 """
381 """
382 a1, a2, b1, b2 = chunk
382 a1, a2, b1, b2 = chunk
383 # find involved indexes from annotate result
383 # find involved indexes from annotate result
384 involved = annotated[a1:a2]
384 involved = annotated[a1:a2]
385 if not involved and annotated: # a1 == a2 and a is not empty
385 if not involved and annotated: # a1 == a2 and a is not empty
386 # pure insertion, check nearby lines. ignore lines belong
386 # pure insertion, check nearby lines. ignore lines belong
387 # to the public (first) changeset (i.e. annotated[i][0] == 1)
387 # to the public (first) changeset (i.e. annotated[i][0] == 1)
388 nearbylinenums = {a2, max(0, a1 - 1)}
388 nearbylinenums = {a2, max(0, a1 - 1)}
389 involved = [annotated[i]
389 involved = [annotated[i]
390 for i in nearbylinenums if annotated[i][0] != 1]
390 for i in nearbylinenums if annotated[i][0] != 1]
391 involvedrevs = list(set(r for r, l in involved))
391 involvedrevs = list(set(r for r, l in involved))
392 newfixups = []
392 newfixups = []
393 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
393 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
394 # chunk belongs to a single revision
394 # chunk belongs to a single revision
395 rev = involvedrevs[0]
395 rev = involvedrevs[0]
396 if rev > 1:
396 if rev > 1:
397 fixuprev = rev + 1
397 fixuprev = rev + 1
398 newfixups.append((fixuprev, a1, a2, b1, b2))
398 newfixups.append((fixuprev, a1, a2, b1, b2))
399 elif a2 - a1 == b2 - b1 or b1 == b2:
399 elif a2 - a1 == b2 - b1 or b1 == b2:
400 # 1:1 line mapping, or chunk was deleted
400 # 1:1 line mapping, or chunk was deleted
401 for i in pycompat.xrange(a1, a2):
401 for i in pycompat.xrange(a1, a2):
402 rev, linenum = annotated[i]
402 rev, linenum = annotated[i]
403 if rev > 1:
403 if rev > 1:
404 if b1 == b2: # deletion, simply remove that single line
404 if b1 == b2: # deletion, simply remove that single line
405 nb1 = nb2 = 0
405 nb1 = nb2 = 0
406 else: # 1:1 line mapping, change the corresponding rev
406 else: # 1:1 line mapping, change the corresponding rev
407 nb1 = b1 + i - a1
407 nb1 = b1 + i - a1
408 nb2 = nb1 + 1
408 nb2 = nb1 + 1
409 fixuprev = rev + 1
409 fixuprev = rev + 1
410 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
410 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
411 return self._optimizefixups(newfixups)
411 return self._optimizefixups(newfixups)
412
412
413 @staticmethod
413 @staticmethod
414 def _alldiffchunks(a, b, alines, blines):
414 def _alldiffchunks(a, b, alines, blines):
415 """like mdiff.allblocks, but only care about differences"""
415 """like mdiff.allblocks, but only care about differences"""
416 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
416 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
417 for chunk, btype in blocks:
417 for chunk, btype in blocks:
418 if btype != '!':
418 if btype != '!':
419 continue
419 continue
420 yield chunk
420 yield chunk
421
421
422 def _buildlinelog(self):
422 def _buildlinelog(self):
423 """calculate the initial linelog based on self.content{,line}s.
423 """calculate the initial linelog based on self.content{,line}s.
424 this is similar to running a partial "annotate".
424 this is similar to running a partial "annotate".
425 """
425 """
426 llog = linelog.linelog()
426 llog = linelog.linelog()
427 a, alines = '', []
427 a, alines = '', []
428 for i in pycompat.xrange(len(self.contents)):
428 for i in pycompat.xrange(len(self.contents)):
429 b, blines = self.contents[i], self.contentlines[i]
429 b, blines = self.contents[i], self.contentlines[i]
430 llrev = i * 2 + 1
430 llrev = i * 2 + 1
431 chunks = self._alldiffchunks(a, b, alines, blines)
431 chunks = self._alldiffchunks(a, b, alines, blines)
432 for a1, a2, b1, b2 in reversed(list(chunks)):
432 for a1, a2, b1, b2 in reversed(list(chunks)):
433 llog.replacelines(llrev, a1, a2, b1, b2)
433 llog.replacelines(llrev, a1, a2, b1, b2)
434 a, alines = b, blines
434 a, alines = b, blines
435 return llog
435 return llog
436
436
437 def _checkoutlinelog(self):
437 def _checkoutlinelog(self):
438 """() -> [str]. check out file contents from linelog"""
438 """() -> [str]. check out file contents from linelog"""
439 contents = []
439 contents = []
440 for i in pycompat.xrange(len(self.contents)):
440 for i in pycompat.xrange(len(self.contents)):
441 rev = (i + 1) * 2
441 rev = (i + 1) * 2
442 self.linelog.annotate(rev)
442 self.linelog.annotate(rev)
443 content = ''.join(map(self._getline, self.linelog.annotateresult))
443 content = ''.join(map(self._getline, self.linelog.annotateresult))
444 contents.append(content)
444 contents.append(content)
445 return contents
445 return contents
446
446
447 def _checkoutlinelogwithedits(self):
447 def _checkoutlinelogwithedits(self):
448 """() -> [str]. prompt all lines for edit"""
448 """() -> [str]. prompt all lines for edit"""
449 alllines = self.linelog.getalllines()
449 alllines = self.linelog.getalllines()
450 # header
450 # header
451 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
451 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
452 'exists in the changeset to the top\nHG:\n')
452 'exists in the changeset to the top\nHG:\n')
453 % self.fctxs[-1].path())
453 % self.fctxs[-1].path())
454 # [(idx, fctx)]. hide the dummy emptyfilecontext
454 # [(idx, fctx)]. hide the dummy emptyfilecontext
455 visiblefctxs = [(i, f)
455 visiblefctxs = [(i, f)
456 for i, f in enumerate(self.fctxs)
456 for i, f in enumerate(self.fctxs)
457 if not isinstance(f, emptyfilecontext)]
457 if not isinstance(f, emptyfilecontext)]
458 for i, (j, f) in enumerate(visiblefctxs):
458 for i, (j, f) in enumerate(visiblefctxs):
459 editortext += (_('HG: %s/%s %s %s\n') %
459 editortext += (_('HG: %s/%s %s %s\n') %
460 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
460 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
461 node.short(f.node()),
461 node.short(f.node()),
462 f.description().split('\n',1)[0]))
462 f.description().split('\n',1)[0]))
463 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
463 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
464 # figure out the lifetime of a line, this is relatively inefficient,
464 # figure out the lifetime of a line, this is relatively inefficient,
465 # but probably fine
465 # but probably fine
466 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
466 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
467 for i, f in visiblefctxs:
467 for i, f in visiblefctxs:
468 self.linelog.annotate((i + 1) * 2)
468 self.linelog.annotate((i + 1) * 2)
469 for l in self.linelog.annotateresult:
469 for l in self.linelog.annotateresult:
470 lineset[l].add(i)
470 lineset[l].add(i)
471 # append lines
471 # append lines
472 for l in alllines:
472 for l in alllines:
473 editortext += (' %s : %s' %
473 editortext += (' %s : %s' %
474 (''.join([('y' if i in lineset[l] else ' ')
474 (''.join([('y' if i in lineset[l] else ' ')
475 for i, _f in visiblefctxs]),
475 for i, _f in visiblefctxs]),
476 self._getline(l)))
476 self._getline(l)))
477 # run editor
477 # run editor
478 editedtext = self.ui.edit(editortext, '', action='absorb')
478 editedtext = self.ui.edit(editortext, '', action='absorb')
479 if not editedtext:
479 if not editedtext:
480 raise error.Abort(_('empty editor text'))
480 raise error.Abort(_('empty editor text'))
481 # parse edited result
481 # parse edited result
482 contents = ['' for i in self.fctxs]
482 contents = ['' for i in self.fctxs]
483 leftpadpos = 4
483 leftpadpos = 4
484 colonpos = leftpadpos + len(visiblefctxs) + 1
484 colonpos = leftpadpos + len(visiblefctxs) + 1
485 for l in mdiff.splitnewlines(editedtext):
485 for l in mdiff.splitnewlines(editedtext):
486 if l.startswith('HG:'):
486 if l.startswith('HG:'):
487 continue
487 continue
488 if l[colonpos - 1:colonpos + 2] != ' : ':
488 if l[colonpos - 1:colonpos + 2] != ' : ':
489 raise error.Abort(_('malformed line: %s') % l)
489 raise error.Abort(_('malformed line: %s') % l)
490 linecontent = l[colonpos + 2:]
490 linecontent = l[colonpos + 2:]
491 for i, ch in enumerate(
491 for i, ch in enumerate(
492 pycompat.bytestr(l[leftpadpos:colonpos - 1])):
492 pycompat.bytestr(l[leftpadpos:colonpos - 1])):
493 if ch == 'y':
493 if ch == 'y':
494 contents[visiblefctxs[i][0]] += linecontent
494 contents[visiblefctxs[i][0]] += linecontent
495 # chunkstats is hard to calculate if anything changes, therefore
495 # chunkstats is hard to calculate if anything changes, therefore
496 # set them to just a simple value (1, 1).
496 # set them to just a simple value (1, 1).
497 if editedtext != editortext:
497 if editedtext != editortext:
498 self.chunkstats = [1, 1]
498 self.chunkstats = [1, 1]
499 return contents
499 return contents
500
500
501 def _getline(self, lineinfo):
501 def _getline(self, lineinfo):
502 """((rev, linenum)) -> str. convert rev+line number to line content"""
502 """((rev, linenum)) -> str. convert rev+line number to line content"""
503 rev, linenum = lineinfo
503 rev, linenum = lineinfo
504 if rev & 1: # odd: original line taken from fctxs
504 if rev & 1: # odd: original line taken from fctxs
505 return self.contentlines[rev // 2][linenum]
505 return self.contentlines[rev // 2][linenum]
506 else: # even: fixup line from targetfctx
506 else: # even: fixup line from targetfctx
507 return self.targetlines[linenum]
507 return self.targetlines[linenum]
508
508
509 def _iscontinuous(self, a1, a2, closedinterval=False):
509 def _iscontinuous(self, a1, a2, closedinterval=False):
510 """(a1, a2 : int) -> bool
510 """(a1, a2 : int) -> bool
511
511
512 check if these lines are continuous. i.e. no other insertions or
512 check if these lines are continuous. i.e. no other insertions or
513 deletions (from other revisions) among these lines.
513 deletions (from other revisions) among these lines.
514
514
515 closedinterval decides whether a2 should be included or not. i.e. is
515 closedinterval decides whether a2 should be included or not. i.e. is
516 it [a1, a2), or [a1, a2] ?
516 it [a1, a2), or [a1, a2] ?
517 """
517 """
518 if a1 >= a2:
518 if a1 >= a2:
519 return True
519 return True
520 llog = self.linelog
520 llog = self.linelog
521 offset1 = llog.getoffset(a1)
521 offset1 = llog.getoffset(a1)
522 offset2 = llog.getoffset(a2) + int(closedinterval)
522 offset2 = llog.getoffset(a2) + int(closedinterval)
523 linesinbetween = llog.getalllines(offset1, offset2)
523 linesinbetween = llog.getalllines(offset1, offset2)
524 return len(linesinbetween) == a2 - a1 + int(closedinterval)
524 return len(linesinbetween) == a2 - a1 + int(closedinterval)
525
525
526 def _optimizefixups(self, fixups):
526 def _optimizefixups(self, fixups):
527 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
527 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
528 merge adjacent fixups to make them less fragmented.
528 merge adjacent fixups to make them less fragmented.
529 """
529 """
530 result = []
530 result = []
531 pcurrentchunk = [[-1, -1, -1, -1, -1]]
531 pcurrentchunk = [[-1, -1, -1, -1, -1]]
532
532
533 def pushchunk():
533 def pushchunk():
534 if pcurrentchunk[0][0] != -1:
534 if pcurrentchunk[0][0] != -1:
535 result.append(tuple(pcurrentchunk[0]))
535 result.append(tuple(pcurrentchunk[0]))
536
536
537 for i, chunk in enumerate(fixups):
537 for i, chunk in enumerate(fixups):
538 rev, a1, a2, b1, b2 = chunk
538 rev, a1, a2, b1, b2 = chunk
539 lastrev = pcurrentchunk[0][0]
539 lastrev = pcurrentchunk[0][0]
540 lasta2 = pcurrentchunk[0][2]
540 lasta2 = pcurrentchunk[0][2]
541 lastb2 = pcurrentchunk[0][4]
541 lastb2 = pcurrentchunk[0][4]
542 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
542 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
543 self._iscontinuous(max(a1 - 1, 0), a1)):
543 self._iscontinuous(max(a1 - 1, 0), a1)):
544 # merge into currentchunk
544 # merge into currentchunk
545 pcurrentchunk[0][2] = a2
545 pcurrentchunk[0][2] = a2
546 pcurrentchunk[0][4] = b2
546 pcurrentchunk[0][4] = b2
547 else:
547 else:
548 pushchunk()
548 pushchunk()
549 pcurrentchunk[0] = list(chunk)
549 pcurrentchunk[0] = list(chunk)
550 pushchunk()
550 pushchunk()
551 return result
551 return result
552
552
553 def _showchanges(self, fm, alines, blines, chunk, fixups):
553 def _showchanges(self, fm, alines, blines, chunk, fixups):
554
554
555 def trim(line):
555 def trim(line):
556 if line.endswith('\n'):
556 if line.endswith('\n'):
557 line = line[:-1]
557 line = line[:-1]
558 return line
558 return line
559
559
560 # this is not optimized for perf but _showchanges only gets executed
560 # this is not optimized for perf but _showchanges only gets executed
561 # with an extra command-line flag.
561 # with an extra command-line flag.
562 a1, a2, b1, b2 = chunk
562 a1, a2, b1, b2 = chunk
563 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
563 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
564 for idx, fa1, fa2, fb1, fb2 in fixups:
564 for idx, fa1, fa2, fb1, fb2 in fixups:
565 for i in pycompat.xrange(fa1, fa2):
565 for i in pycompat.xrange(fa1, fa2):
566 aidxs[i - a1] = (max(idx, 1) - 1) // 2
566 aidxs[i - a1] = (max(idx, 1) - 1) // 2
567 for i in pycompat.xrange(fb1, fb2):
567 for i in pycompat.xrange(fb1, fb2):
568 bidxs[i - b1] = (max(idx, 1) - 1) // 2
568 bidxs[i - b1] = (max(idx, 1) - 1) // 2
569
569
570 fm.startitem()
570 fm.startitem()
571 fm.write('hunk', ' %s\n',
571 fm.write('hunk', ' %s\n',
572 '@@ -%d,%d +%d,%d @@'
572 '@@ -%d,%d +%d,%d @@'
573 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
573 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
574 fm.data(path=self.path, linetype='hunk')
574 fm.data(path=self.path, linetype='hunk')
575
575
576 def writeline(idx, diffchar, line, linetype, linelabel):
576 def writeline(idx, diffchar, line, linetype, linelabel):
577 fm.startitem()
577 fm.startitem()
578 node = ''
578 node = ''
579 if idx:
579 if idx:
580 ctx = self.fctxs[idx]
580 ctx = self.fctxs[idx]
581 fm.context(fctx=ctx)
581 fm.context(fctx=ctx)
582 node = ctx.hex()
582 node = ctx.hex()
583 self.ctxaffected.add(ctx.changectx())
583 self.ctxaffected.add(ctx.changectx())
584 fm.write('node', '%-7.7s ', node, label='absorb.node')
584 fm.write('node', '%-7.7s ', node, label='absorb.node')
585 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
585 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
586 label=linelabel)
586 label=linelabel)
587 fm.data(path=self.path, linetype=linetype)
587 fm.data(path=self.path, linetype=linetype)
588
588
589 for i in pycompat.xrange(a1, a2):
589 for i in pycompat.xrange(a1, a2):
590 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
590 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
591 'diff.deleted')
591 'diff.deleted')
592 for i in pycompat.xrange(b1, b2):
592 for i in pycompat.xrange(b1, b2):
593 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
593 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
594 'diff.inserted')
594 'diff.inserted')
595
595
596 class fixupstate(object):
596 class fixupstate(object):
597 """state needed to run absorb
597 """state needed to run absorb
598
598
599 internally, it keeps paths and filefixupstates.
599 internally, it keeps paths and filefixupstates.
600
600
601 a typical use is like filefixupstates:
601 a typical use is like filefixupstates:
602
602
603 1. call diffwith, to calculate fixups
603 1. call diffwith, to calculate fixups
604 2. (optionally), present fixups to the user, or edit fixups
604 2. (optionally), present fixups to the user, or edit fixups
605 3. call apply, to apply changes to memory
605 3. call apply, to apply changes to memory
606 4. call commit, to commit changes to hg database
606 4. call commit, to commit changes to hg database
607 """
607 """
608
608
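    # Editor's sketch (not part of absorb.py): the end-to-end driver implied by
    # the docstring above, roughly what the absorb command itself does, in a
    # simplified form; repo and ui are assumed to be available.
    limit = ui.configint('absorb', 'max-stack-size')
    stack = getdraftstack(repo['.'], limit)     # oldest-first draft stack
    state = fixupstate(stack, ui=ui, opts={})
    state.diffwith(repo[None])                  # 1. diff against the working copy
    # 2. optionally show or edit the computed fixups
    state.apply()                               # 3. rewrite file contents in memory
    state.commit()                              # 4. commit, move bookmarks, clean up
    state.printchunkstats()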
609 def __init__(self, stack, ui=None, opts=None):
609 def __init__(self, stack, ui=None, opts=None):
610 """([ctx], ui or None) -> None
610 """([ctx], ui or None) -> None
611
611
612 stack: should be linear, and sorted by topo order - oldest first.
612 stack: should be linear, and sorted by topo order - oldest first.
613 all commits in stack are considered mutable.
613 all commits in stack are considered mutable.
614 """
614 """
615 assert stack
615 assert stack
616 self.ui = ui or nullui()
616 self.ui = ui or nullui()
617 self.opts = opts or {}
617 self.opts = opts or {}
618 self.stack = stack
618 self.stack = stack
619 self.repo = stack[-1].repo().unfiltered()
619 self.repo = stack[-1].repo().unfiltered()
620
620
621 # following fields will be filled later
621 # following fields will be filled later
622 self.paths = [] # [str]
622 self.paths = [] # [str]
623 self.status = None # ctx.status output
623 self.status = None # ctx.status output
624 self.fctxmap = {} # {path: {ctx: fctx}}
624 self.fctxmap = {} # {path: {ctx: fctx}}
625 self.fixupmap = {} # {path: filefixupstate}
625 self.fixupmap = {} # {path: filefixupstate}
626 self.replacemap = {} # {oldnode: newnode or None}
626 self.replacemap = {} # {oldnode: newnode or None}
627 self.finalnode = None # head after all fixups
627 self.finalnode = None # head after all fixups
628 self.ctxaffected = set() # ctx that will be absorbed into
628 self.ctxaffected = set() # ctx that will be absorbed into
629
629
630 def diffwith(self, targetctx, match=None, fm=None):
630 def diffwith(self, targetctx, match=None, fm=None):
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
632 # only care about modified files
632 # only care about modified files
633 self.status = self.stack[-1].status(targetctx, match)
633 self.status = self.stack[-1].status(targetctx, match)
634 self.paths = []
634 self.paths = []
635 # but if --edit-lines is used, the user may want to edit files
635 # but if --edit-lines is used, the user may want to edit files
636 # even if they are not modified
636 # even if they are not modified
637 editopt = self.opts.get('edit_lines')
637 editopt = self.opts.get('edit_lines')
638 if not self.status.modified and editopt and match:
638 if not self.status.modified and editopt and match:
639 interestingpaths = match.files()
639 interestingpaths = match.files()
640 else:
640 else:
641 interestingpaths = self.status.modified
641 interestingpaths = self.status.modified
642 # prepare the filefixupstate
642 # prepare the filefixupstate
643 seenfctxs = set()
643 seenfctxs = set()
644 # sorting is necessary to eliminate ambiguity for the "double move"
644 # sorting is necessary to eliminate ambiguity for the "double move"
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
646 for path in sorted(interestingpaths):
646 for path in sorted(interestingpaths):
647 self.ui.debug('calculating fixups for %s\n' % path)
647 self.ui.debug('calculating fixups for %s\n' % path)
648 targetfctx = targetctx[path]
648 targetfctx = targetctx[path]
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
650 # ignore symbolic links or binary, or unchanged files
650 # ignore symbolic links or binary, or unchanged files
651 if any(f.islink() or stringutil.binary(f.data())
651 if any(f.islink() or stringutil.binary(f.data())
652 for f in [targetfctx] + fctxs
652 for f in [targetfctx] + fctxs
653 if not isinstance(f, emptyfilecontext)):
653 if not isinstance(f, emptyfilecontext)):
654 continue
654 continue
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
656 continue
656 continue
657 seenfctxs.update(fctxs[1:])
657 seenfctxs.update(fctxs[1:])
658 self.fctxmap[path] = ctx2fctx
658 self.fctxmap[path] = ctx2fctx
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
660 if fm is not None:
660 if fm is not None:
661 fm.startitem()
661 fm.startitem()
662 fm.plain('showing changes for ')
662 fm.plain('showing changes for ')
663 fm.write('path', '%s\n', path, label='absorb.path')
663 fm.write('path', '%s\n', path, label='absorb.path')
664 fm.data(linetype='path')
664 fm.data(linetype='path')
665 fstate.diffwith(targetfctx, fm)
665 fstate.diffwith(targetfctx, fm)
666 self.fixupmap[path] = fstate
666 self.fixupmap[path] = fstate
667 self.paths.append(path)
667 self.paths.append(path)
668 self.ctxaffected.update(fstate.ctxaffected)
668 self.ctxaffected.update(fstate.ctxaffected)
669
669
670 def apply(self):
670 def apply(self):
671 """apply fixups to individual filefixupstates"""
671 """apply fixups to individual filefixupstates"""
672 for path, state in self.fixupmap.iteritems():
672 for path, state in self.fixupmap.iteritems():
673 if self.ui.debugflag:
673 if self.ui.debugflag:
674 self.ui.write(_('applying fixups to %s\n') % path)
674 self.ui.write(_('applying fixups to %s\n') % path)
675 state.apply()
675 state.apply()
676
676
677 @property
677 @property
678 def chunkstats(self):
678 def chunkstats(self):
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
680 return dict((path, state.chunkstats)
680 return dict((path, state.chunkstats)
681 for path, state in self.fixupmap.iteritems())
681 for path, state in self.fixupmap.iteritems())
682
682
     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
-        with self.repo.transaction('absorb') as tr:
-            self._commitstack()
-            self._movebookmarks(tr)
-            if self.repo['.'].node() in self.replacemap:
-                self._moveworkingdirectoryparent()
-            self._cleanupoldcommits()
+        with self.repo.wlock(), self.repo.lock():
+            with self.repo.transaction('absorb') as tr:
+                self._commitstack()
+                self._movebookmarks(tr)
+                if self.repo['.'].node() in self.replacemap:
+                    self._moveworkingdirectoryparent()
+                self._cleanupoldcommits()
         return self.finalnode
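The new outer `with self.repo.wlock(), self.repo.lock():` takes the working-directory lock and then the store lock before the transaction is opened, which is Mercurial's conventional acquisition order; holding both for the whole rewrite keeps a concurrent command from modifying the repository while absorb replaces the stack.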
693
692
694 def printchunkstats(self):
693 def printchunkstats(self):
695 """print things like '1 of 2 chunk(s) applied'"""
694 """print things like '1 of 2 chunk(s) applied'"""
696 ui = self.ui
695 ui = self.ui
697 chunkstats = self.chunkstats
696 chunkstats = self.chunkstats
698 if ui.verbose:
697 if ui.verbose:
699 # chunkstats for each file
698 # chunkstats for each file
700 for path, stat in chunkstats.iteritems():
699 for path, stat in chunkstats.iteritems():
701 if stat[0]:
700 if stat[0]:
702 ui.write(_('%s: %d of %d chunk(s) applied\n')
701 ui.write(_('%s: %d of %d chunk(s) applied\n')
703 % (path, stat[0], stat[1]))
702 % (path, stat[0], stat[1]))
704 elif not ui.quiet:
703 elif not ui.quiet:
705 # a summary for all files
704 # a summary for all files
706 stats = chunkstats.values()
705 stats = chunkstats.values()
707 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
706 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
708 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
707 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
709
708
710 def _commitstack(self):
709 def _commitstack(self):
711 """make new commits. update self.finalnode, self.replacemap.
710 """make new commits. update self.finalnode, self.replacemap.
712 it is splitted from "commit" to avoid too much indentation.
711 it is splitted from "commit" to avoid too much indentation.
713 """
712 """
714 # last node (20-char) committed by us
713 # last node (20-char) committed by us
715 lastcommitted = None
714 lastcommitted = None
716 # p1 which overrides the parent of the next commit, "None" means use
715 # p1 which overrides the parent of the next commit, "None" means use
717 # the original parent unchanged
716 # the original parent unchanged
718 nextp1 = None
717 nextp1 = None
719 for ctx in self.stack:
718 for ctx in self.stack:
720 memworkingcopy = self._getnewfilecontents(ctx)
719 memworkingcopy = self._getnewfilecontents(ctx)
721 if not memworkingcopy and not lastcommitted:
720 if not memworkingcopy and not lastcommitted:
722 # nothing changed, nothing commited
721 # nothing changed, nothing commited
723 nextp1 = ctx
722 nextp1 = ctx
724 continue
723 continue
725 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
724 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
726 # changeset is no longer necessary
725 # changeset is no longer necessary
727 self.replacemap[ctx.node()] = None
726 self.replacemap[ctx.node()] = None
728 msg = _('became empty and was dropped')
727 msg = _('became empty and was dropped')
729 else:
728 else:
730 # changeset needs re-commit
729 # changeset needs re-commit
731 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
730 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
732 lastcommitted = self.repo[nodestr]
731 lastcommitted = self.repo[nodestr]
733 nextp1 = lastcommitted
732 nextp1 = lastcommitted
734 self.replacemap[ctx.node()] = lastcommitted.node()
733 self.replacemap[ctx.node()] = lastcommitted.node()
735 if memworkingcopy:
734 if memworkingcopy:
736 msg = _('%d file(s) changed, became %s') % (
735 msg = _('%d file(s) changed, became %s') % (
737 len(memworkingcopy), self._ctx2str(lastcommitted))
736 len(memworkingcopy), self._ctx2str(lastcommitted))
738 else:
737 else:
739 msg = _('became %s') % self._ctx2str(lastcommitted)
738 msg = _('became %s') % self._ctx2str(lastcommitted)
740 if self.ui.verbose and msg:
739 if self.ui.verbose and msg:
741 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
740 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
742 self.finalnode = lastcommitted and lastcommitted.node()
741 self.finalnode = lastcommitted and lastcommitted.node()
743
742
744 def _ctx2str(self, ctx):
743 def _ctx2str(self, ctx):
745 if self.ui.debugflag:
744 if self.ui.debugflag:
746 return '%d:%s' % (ctx.rev(), ctx.hex())
745 return '%d:%s' % (ctx.rev(), ctx.hex())
747 else:
746 else:
748 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
747 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
749
748
750 def _getnewfilecontents(self, ctx):
749 def _getnewfilecontents(self, ctx):
751 """(ctx) -> {path: str}
750 """(ctx) -> {path: str}
752
751
753 fetch file contents from filefixupstates.
752 fetch file contents from filefixupstates.
754 return the working copy overrides - files different from ctx.
753 return the working copy overrides - files different from ctx.
755 """
754 """
756 result = {}
755 result = {}
757 for path in self.paths:
756 for path in self.paths:
758 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
757 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
759 if ctx not in ctx2fctx:
758 if ctx not in ctx2fctx:
760 continue
759 continue
761 fctx = ctx2fctx[ctx]
760 fctx = ctx2fctx[ctx]
762 content = fctx.data()
761 content = fctx.data()
763 newcontent = self.fixupmap[path].getfinalcontent(fctx)
762 newcontent = self.fixupmap[path].getfinalcontent(fctx)
764 if content != newcontent:
763 if content != newcontent:
765 result[fctx.path()] = newcontent
764 result[fctx.path()] = newcontent
766 return result
765 return result
767
766
768 def _movebookmarks(self, tr):
767 def _movebookmarks(self, tr):
769 repo = self.repo
768 repo = self.repo
770 needupdate = [(name, self.replacemap[hsh])
769 needupdate = [(name, self.replacemap[hsh])
771 for name, hsh in repo._bookmarks.iteritems()
770 for name, hsh in repo._bookmarks.iteritems()
772 if hsh in self.replacemap]
771 if hsh in self.replacemap]
773 changes = []
772 changes = []
774 for name, hsh in needupdate:
773 for name, hsh in needupdate:
775 if hsh:
774 if hsh:
776 changes.append((name, hsh))
775 changes.append((name, hsh))
777 if self.ui.verbose:
776 if self.ui.verbose:
778 self.ui.write(_('moving bookmark %s to %s\n')
777 self.ui.write(_('moving bookmark %s to %s\n')
779 % (name, node.hex(hsh)))
778 % (name, node.hex(hsh)))
780 else:
779 else:
781 changes.append((name, None))
780 changes.append((name, None))
782 if self.ui.verbose:
781 if self.ui.verbose:
783 self.ui.write(_('deleting bookmark %s\n') % name)
782 self.ui.write(_('deleting bookmark %s\n') % name)
784 repo._bookmarks.applychanges(repo, tr, changes)
783 repo._bookmarks.applychanges(repo, tr, changes)
785
784
786 def _moveworkingdirectoryparent(self):
785 def _moveworkingdirectoryparent(self):
787 if not self.finalnode:
786 if not self.finalnode:
788 # Find the latest not-{obsoleted,stripped} parent.
787 # Find the latest not-{obsoleted,stripped} parent.
789 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
788 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
790 ctx = self.repo[revs.first()]
789 ctx = self.repo[revs.first()]
791 self.finalnode = ctx.node()
790 self.finalnode = ctx.node()
792 else:
791 else:
793 ctx = self.repo[self.finalnode]
792 ctx = self.repo[self.finalnode]
794
793
795 dirstate = self.repo.dirstate
794 dirstate = self.repo.dirstate
796 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
795 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
797 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
796 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
798 noop = lambda: 0
797 noop = lambda: 0
799 restore = noop
798 restore = noop
800 if util.safehasattr(dirstate, '_fsmonitorstate'):
799 if util.safehasattr(dirstate, '_fsmonitorstate'):
801 bak = dirstate._fsmonitorstate.invalidate
800 bak = dirstate._fsmonitorstate.invalidate
802 def restore():
801 def restore():
803 dirstate._fsmonitorstate.invalidate = bak
802 dirstate._fsmonitorstate.invalidate = bak
804 dirstate._fsmonitorstate.invalidate = noop
803 dirstate._fsmonitorstate.invalidate = noop
805 try:
804 try:
806 with dirstate.parentchange():
805 with dirstate.parentchange():
807 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
806 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
808 finally:
807 finally:
809 restore()
808 restore()
810
809
    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy)):
            return False
        for path, content in memworkingcopy.iteritems():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True

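    # Illustrative example (hypothetical values, flags assumed unchanged): if
    # ctx only touches 'foo.py' and the override rewrites it back to the
    # parent's exact content, the amended commit would change nothing:
    #
    #     memworkingcopy = {'foo.py': pctx['foo.py'].data()}
    #     fixupstate._willbecomenoop(memworkingcopy, ctx)   # -> True
    #
    # and the caller can drop that changeset instead of committing it.
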
    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """({path: content}, ctx, node) -> node. make a single commit

        the commit is a clone from ctx, with an (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        return mctx.commit()

    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _cleanupoldcommits(self):
        replacements = {k: ([v] if v is not None else [])
                        for k, v in self.replacemap.iteritems()}
        if replacements:
            scmutil.cleanupnodes(self.repo, replacements, operation='absorb',
                                 fixphase=True)

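    # Shape of the mapping handed to scmutil.cleanupnodes above (hypothetical
    # nodes, for illustration only):
    #
    #     {old_node_a: [new_node_a],   # rewritten by absorb
    #      old_node_b: []}             # became empty; pruned without successor
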
def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)

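# Illustrative only (hypothetical hunk and path): a hunk that replaces lines
# 3-4 of the old file with two new lines would come back from _parsechunk as
#
#     ('foo.py', (2, 4, ['new line 1\n', 'new line 2\n']))
#
# i.e. a 0-based, half-open [a1, a2) line range plus the replacement lines.
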
def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    patchmap = collections.defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)

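# A minimal sketch (not part of absorb.py; _splicedemo is a made-up name) of
# why the patches are sorted in reverse above: splicing from the bottom of
# the file upwards keeps the earlier (a1, a2) offsets valid, because nothing
# before them has shifted yet.
def _splicedemo():
    lines = ['a\n', 'b\n', 'c\n', 'd\n']
    patches = [(0, 1, ['A\n']), (2, 4, ['C\n'])]
    for a1, a2, blines in sorted(patches, reverse=True):
        lines[a1:a2] = blines
    return lines  # ['A\n', 'b\n', 'C\n']
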
def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'max-stack-size')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no mutable changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    fm = None
    if opts.get('print_changes') or not opts.get('apply_changes'):
        fm = ui.formatter('absorb', opts)
    state.diffwith(targetctx, matcher, fm)
    if fm is not None:
        fm.startitem()
        fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
        fm.data(linetype='summary')
        for ctx in reversed(stack):
            if ctx not in state.ctxaffected:
                continue
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(linetype='changeset')
            fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
            descfirstline = ctx.description().splitlines()[0]
            fm.write('descfirstline', '%s\n', descfirstline,
                     label='absorb.description')
        fm.end()
    if not opts.get('dry_run'):
        if (not opts.get('apply_changes') and
            state.ctxaffected and
            ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
            raise error.Abort(_('absorb cancelled\n'))

        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state

@command('absorb',
    [('a', 'apply-changes', None,
      _('apply changes without prompting for confirmation')),
     ('p', 'print-changes', None,
      _('always print which changesets are modified by which changes')),
     ('i', 'interactive', None,
      _('interactively select which chunks to apply (EXPERIMENTAL)')),
     ('e', 'edit-lines', None,
      _('edit what lines belong to which changesets before commit '
        '(EXPERIMENTAL)')),
    ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
    _('hg absorb [OPTION] [FILE]...'),
    helpcategory=command.CATEGORY_COMMITTING,
    helpbasic=True)
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. Such
    changes can be observed by :hg:`status` or :hg:`diff` afterwards. In
    other words, absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    By default, absorb will show what it plans to do and prompt for
    confirmation. If you are confident that the changes will be absorbed
    to the correct place, run :hg:`absorb -a` to apply the changes
    immediately.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    opts = pycompat.byteskwargs(opts)

    with repo.wlock(), repo.lock():
        if not opts['dry_run']:
            cmdutil.checkunfinished(repo)

        state = absorb(ui, repo, pats=pats, opts=opts)
        if sum(s[0] for s in state.chunkstats.values()) == 0:
            return 1
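
# The wlock/lock + checkunfinished pattern above is what makes absorb abort
# early when another operation (e.g. an interrupted rebase) has left state
# behind. A minimal sketch of the same guard in a hypothetical command;
# 'guardedcmd' is a made-up name, while repo.wlock(), repo.lock() and
# cmdutil.checkunfinished() are the existing APIs this change relies on.
def guardedcmd(ui, repo, **opts):
    with repo.wlock(), repo.lock():
        # raises error.Abort (with a hint to continue or abort the other
        # operation) if e.g. a rebase or graft is still in progress
        cmdutil.checkunfinished(repo)
        # ... rewrite history here while both locks are held ...
        return 0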