py3: use .startswith() instead of bytes[0]...
Pulkit Goyal
r42635:c1bf63ac default
@@ -1,1018 +1,1018 @@
# absorb.py
#
# Copyright 2016 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""apply working directory changes to changesets (EXPERIMENTAL)

The absorb extension provides a command to use annotate information to
amend modified chunks into the corresponding non-public changesets.

::

    [absorb]
    # only check 50 recent non-public changesets at most
    max-stack-size = 50
    # whether to add noise to new commits to avoid obsolescence cycle
    add-noise = 1
    # make `amend --correlated` a shortcut to the main command
    amend-flag = correlated

    [color]
    absorb.description = yellow
    absorb.node = blue bold
    absorb.path = bold
"""

# TODO:
# * Rename config items to [commands] namespace
# * Converge getdraftstack() with other code in core
# * move many attributes on fixupstate to be private

from __future__ import absolute_import

import collections

from mercurial.i18n import _
from mercurial import (
    cmdutil,
    commands,
    context,
    crecord,
    error,
    linelog,
    mdiff,
    node,
    obsolete,
    patch,
    phases,
    pycompat,
    registrar,
    scmutil,
    util,
)
from mercurial.utils import (
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

configitem('absorb', 'add-noise', default=True)
configitem('absorb', 'amend-flag', default=None)
configitem('absorb', 'max-stack-size', default=50)

colortable = {
    'absorb.description': 'yellow',
    'absorb.node': 'blue bold',
    'absorb.path': 'bold',
}

defaultdict = collections.defaultdict

class nullui(object):
    """blank ui object doing nothing"""
    debugflag = False
    verbose = False
    quiet = True

    def __getitem__(name):
        def nullfunc(*args, **kwds):
            return
        return nullfunc

class emptyfilecontext(object):
    """minimal filecontext representing an empty file"""
    def data(self):
        return ''

    def node(self):
        return node.nullid

def uniq(lst):
    """list -> list. remove duplicated items without changing the order"""
    seen = set()
    result = []
    for x in lst:
        if x not in seen:
            seen.add(x)
            result.append(x)
    return result

def getdraftstack(headctx, limit=None):
    """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.

    changesets are sorted in topo order, oldest first.
    return at most limit items, if limit is a positive number.

    merges are considered as non-draft as well. i.e. every commit
    returned has and only has 1 parent.
    """
    ctx = headctx
    result = []
    while ctx.phase() != phases.public:
        if limit and len(result) >= limit:
            break
        parents = ctx.parents()
        if len(parents) != 1:
            break
        result.append(ctx)
        ctx = parents[0]
    result.reverse()
    return result

def getfilestack(stack, path, seenfctxs=None):
    """([ctx], str, set) -> [fctx], {ctx: fctx}

    stack is a list of contexts, from old to new. usually they are what
    "getdraftstack" returns.

    follows renames, but not copies.

    seenfctxs is a set of filecontexts that will be considered "immutable".
    they are usually what this function returned in earlier calls, useful
    to avoid issues that a file was "moved" to multiple places and was then
    modified differently, like: "a" was copied to "b", "a" was also copied to
    "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
    and we enforce only one of them to be able to affect "a"'s content.

    return an empty list and an empty dict, if the specified path does not
    exist in stack[-1] (the top of the stack).

    otherwise, return a list of de-duplicated filecontexts, and the map to
    convert ctx in the stack to fctx, for possible mutable fctxs. the first item
    of the list would be outside the stack and should be considered immutable.
    the remaining items are within the stack.

    for example, given the following changelog and corresponding filelog
    revisions:

      changelog: 3----4----5----6----7
      filelog:   x    0----1----1----2 (x: no such file yet)

    - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
    - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
      dummy empty filecontext.
    - if stack = [2], returns ([], {})
    - if stack = [7], returns ([1, 2], {7: 2})
    - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
      removed, since 1 is immutable.
    """
    if seenfctxs is None:
        seenfctxs = set()
    assert stack

    if path not in stack[-1]:
        return [], {}

    fctxs = []
    fctxmap = {}

    pctx = stack[0].p1() # the public (immutable) ctx we stop at
    for ctx in reversed(stack):
        if path not in ctx: # the file is added in the next commit
            pctx = ctx
            break
        fctx = ctx[path]
        fctxs.append(fctx)
        if fctx in seenfctxs: # treat fctx as the immutable one
            pctx = None # do not add another immutable fctx
            break
        fctxmap[ctx] = fctx # only for mutable fctxs
        copy = fctx.copysource()
        if copy:
            path = copy # follow rename
            if path in ctx: # but do not follow copy
                pctx = ctx.p1()
                break

    if pctx is not None: # need an extra immutable fctx
        if path in pctx:
            fctxs.append(pctx[path])
        else:
            fctxs.append(emptyfilecontext())

    fctxs.reverse()
    # note: we rely on a property of hg: filerev is not reused for linear
    # history. i.e. it's impossible to have:
    #   changelog: 4----5----6 (linear, no merges)
    #   filelog:   1----2----1
    #                        ^ reuse filerev (impossible)
    # because parents are part of the hash. if that's not true, we need to
    # remove uniq and find a different way to identify fctxs.
    return uniq(fctxs), fctxmap

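A minimal usage sketch of how the two helpers above compose (hedged: `repo` and the b'a.txt' path are placeholders, not part of this file; in absorb itself the stack is built from the working directory parent)::

    stack = getdraftstack(repo['.'], limit=50)       # oldest-first draft changesets
    fctxs, ctx2fctx = getfilestack(stack, b'a.txt')
    # fctxs[0] is the immutable base (possibly an emptyfilecontext);
    # ctx2fctx maps each changeset in the stack to its mutable filecontext.
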
class overlaystore(patch.filestore):
    """read-only, hybrid store based on a dict and ctx.
    memworkingcopy: {path: content}, overrides file contents.
    """
    def __init__(self, basectx, memworkingcopy):
        self.basectx = basectx
        self.memworkingcopy = memworkingcopy

    def getfile(self, path):
        """comply with mercurial.patch.filestore.getfile"""
        if path not in self.basectx:
            return None, None, None
        fctx = self.basectx[path]
        if path in self.memworkingcopy:
            content = self.memworkingcopy[path]
        else:
            content = fctx.data()
        mode = (fctx.islink(), fctx.isexec())
        copy = fctx.copysource()
        return content, mode, copy

def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
    memworkingcopy overrides file contents.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    date = ctx.date()
    desc = ctx.description()
    user = ctx.user()
    files = set(ctx.files()).union(memworkingcopy)
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(), parents=parents, text=desc,
        files=files, filectxfn=store, user=user, date=date,
        branch=None, extra=extra)

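For illustration, a hedged sketch of how overlaycontext() is typically used: cloning an existing changectx `ctx` in memory with a few file contents swapped out (the path and content literals are invented; _commitsingle() further below does essentially this)::

    memworkingcopy = {'a.txt': 'new content\n'}   # {path: content} overrides
    mctx = overlaycontext(memworkingcopy, ctx)    # in-memory clone of ctx
    newnode = mctx.commit()                       # writes the rewritten changeset
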
class filefixupstate(object):
    """state needed to apply fixups to a single file

    internally, it keeps file contents of several revisions and a linelog.

    the linelog uses odd revision numbers for original contents (fctxs passed
    to __init__), and even revision numbers for fixups, like:

        linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
        linelog rev 2: fixups made to self.fctxs[0]
        linelog rev 3: self.fctxs[1] (a child of fctxs[0])
        linelog rev 4: fixups made to self.fctxs[1]
        ...

    a typical use is like:

        1. call diffwith, to calculate self.fixups
        2. (optionally), present self.fixups to the user, or change it
        3. call apply, to apply changes
        4. read results from "finalcontents", or call getfinalcontent
    """

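To make the odd/even numbering concrete, this is the arithmetic the methods below rely on (a sketch; `i` is just an index into self.fctxs)::

    i = 1
    original_rev = i * 2 + 1      # 3: content of fctxs[1], set in _buildlinelog()
    fixup_rev = original_rev + 1  # 4: fixups amended into fctxs[1] (_analysediffchunk)
    checkout_rev = (i + 1) * 2    # 4: annotate this to check out fctxs[1] plus its fixups
    assert (max(fixup_rev - 1, 0)) // 2 == i   # rev -> fctx index, as used in apply()
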
    def __init__(self, fctxs, path, ui=None, opts=None):
        """([fctx], ui or None) -> None

        fctxs should be linear, and sorted by topo order - oldest first.
        fctxs[0] will be considered as "immutable" and will not be changed.
        """
        self.fctxs = fctxs
        self.path = path
        self.ui = ui or nullui()
        self.opts = opts or {}

        # following fields are built from fctxs. they exist for perf reason
        self.contents = [f.data() for f in fctxs]
        self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
        self.linelog = self._buildlinelog()
        if self.ui.debugflag:
            assert self._checkoutlinelog() == self.contents

        # following fields will be filled later
        self.chunkstats = [0, 0] # [adopted, total : int]
        self.targetlines = [] # [str]
        self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
        self.finalcontents = [] # [str]
        self.ctxaffected = set()

    def diffwith(self, targetfctx, fm=None):
        """calculate fixups needed by examining the differences between
        self.fctxs[-1] and targetfctx, chunk by chunk.

        targetfctx is the target state we move towards. we may or may not be
        able to get there because not all modified chunks can be amended into
        a non-public fctx unambiguously.

        call this only once, before apply().

        update self.fixups, self.chunkstats, and self.targetlines.
        """
        a = self.contents[-1]
        alines = self.contentlines[-1]
        b = targetfctx.data()
        blines = mdiff.splitnewlines(b)
        self.targetlines = blines

        self.linelog.annotate(self.linelog.maxrev)
        annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
        assert len(annotated) == len(alines)
        # add a dummy end line to make insertion at the end easier
        if annotated:
            dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
            annotated.append(dummyendline)

        # analyse diff blocks
        for chunk in self._alldiffchunks(a, b, alines, blines):
            newfixups = self._analysediffchunk(chunk, annotated)
            self.chunkstats[0] += bool(newfixups) # 1 or 0
            self.chunkstats[1] += 1
            self.fixups += newfixups
            if fm is not None:
                self._showchanges(fm, alines, blines, chunk, newfixups)

    def apply(self):
        """apply self.fixups. update self.linelog, self.finalcontents.

        call this only once, before getfinalcontent(), after diffwith().
        """
        # the following is unnecessary, as it's done by "diffwith":
        # self.linelog.annotate(self.linelog.maxrev)
        for rev, a1, a2, b1, b2 in reversed(self.fixups):
            blines = self.targetlines[b1:b2]
            if self.ui.debugflag:
                idx = (max(rev - 1, 0)) // 2
                self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
                              % (node.short(self.fctxs[idx].node()),
                                 a1, a2, len(blines)))
            self.linelog.replacelines(rev, a1, a2, b1, b2)
        if self.opts.get('edit_lines', False):
            self.finalcontents = self._checkoutlinelogwithedits()
        else:
            self.finalcontents = self._checkoutlinelog()

    def getfinalcontent(self, fctx):
        """(fctx) -> str. get modified file content for a given filecontext"""
        idx = self.fctxs.index(fctx)
        return self.finalcontents[idx]

    def _analysediffchunk(self, chunk, annotated):
        """analyse a different chunk and return new fixups found

        return [] if no lines from the chunk can be safely applied.

        the chunk (or lines) cannot be safely applied, if, for example:
          - the modified (deleted) lines belong to a public changeset
            (self.fctxs[0])
          - the chunk is a pure insertion and the adjacent lines (at most 2
            lines) belong to different non-public changesets, or do not belong
            to any non-public changesets.
          - the chunk is modifying lines from different changesets.
            in this case, if the number of lines deleted equals to the number
            of lines added, assume it's a simple 1:1 map (could be wrong).
            otherwise, give up.
          - the chunk is modifying lines from a single non-public changeset,
            but other revisions touch the area as well. i.e. the lines are
            not continuous as seen from the linelog.
        """
        a1, a2, b1, b2 = chunk
        # find involved indexes from annotate result
        involved = annotated[a1:a2]
        if not involved and annotated: # a1 == a2 and a is not empty
            # pure insertion, check nearby lines. ignore lines belong
            # to the public (first) changeset (i.e. annotated[i][0] == 1)
            nearbylinenums = {a2, max(0, a1 - 1)}
            involved = [annotated[i]
                        for i in nearbylinenums if annotated[i][0] != 1]
        involvedrevs = list(set(r for r, l in involved))
        newfixups = []
        if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
            rev = involvedrevs[0]
            if rev > 1:
                fixuprev = rev + 1
                newfixups.append((fixuprev, a1, a2, b1, b2))
        elif a2 - a1 == b2 - b1 or b1 == b2:
            # 1:1 line mapping, or chunk was deleted
            for i in pycompat.xrange(a1, a2):
                rev, linenum = annotated[i]
                if rev > 1:
                    if b1 == b2: # deletion, simply remove that single line
                        nb1 = nb2 = 0
                    else: # 1:1 line mapping, change the corresponding rev
                        nb1 = b1 + i - a1
                        nb2 = nb1 + 1
                    fixuprev = rev + 1
                    newfixups.append((fixuprev, i, i + 1, nb1, nb2))
        return self._optimizefixups(newfixups)

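A worked example of the tuples this method returns (values are illustrative): if a diff chunk replaces lines 3..5 of the latest committed content with lines 3..6 of the target, and every touched line is annotated to linelog rev 3 (fctxs[1]) with nothing from other revisions in between, the whole chunk becomes one fixup aimed at the even "fixup" revision 4::

    chunk = (3, 5, 3, 6)            # (a1, a2, b1, b2)
    newfixups = [(4, 3, 5, 3, 6)]   # (fixup linelog rev, a1, a2, b1, b2)
    # apply() will later run self.linelog.replacelines(4, 3, 5, 3, 6), so
    # checking out rev (1 + 1) * 2 == 4 yields fctxs[1]'s content plus the edit.
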
    @staticmethod
    def _alldiffchunks(a, b, alines, blines):
        """like mdiff.allblocks, but only care about differences"""
        blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
        for chunk, btype in blocks:
            if btype != '!':
                continue
            yield chunk

    def _buildlinelog(self):
        """calculate the initial linelog based on self.content{,line}s.
        this is similar to running a partial "annotate".
        """
        llog = linelog.linelog()
        a, alines = '', []
        for i in pycompat.xrange(len(self.contents)):
            b, blines = self.contents[i], self.contentlines[i]
            llrev = i * 2 + 1
            chunks = self._alldiffchunks(a, b, alines, blines)
            for a1, a2, b1, b2 in reversed(list(chunks)):
                llog.replacelines(llrev, a1, a2, b1, b2)
            a, alines = b, blines
        return llog

    def _checkoutlinelog(self):
        """() -> [str]. check out file contents from linelog"""
        contents = []
        for i in pycompat.xrange(len(self.contents)):
            rev = (i + 1) * 2
            self.linelog.annotate(rev)
            content = ''.join(map(self._getline, self.linelog.annotateresult))
            contents.append(content)
        return contents

    def _checkoutlinelogwithedits(self):
        """() -> [str]. prompt all lines for edit"""
        alllines = self.linelog.getalllines()
        # header
        editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
                        'exists in the changeset to the top\nHG:\n')
                      % self.fctxs[-1].path())
        # [(idx, fctx)]. hide the dummy emptyfilecontext
        visiblefctxs = [(i, f)
                        for i, f in enumerate(self.fctxs)
                        if not isinstance(f, emptyfilecontext)]
        for i, (j, f) in enumerate(visiblefctxs):
            editortext += (_('HG: %s/%s %s %s\n') %
                           ('|' * i, '-' * (len(visiblefctxs) - i + 1),
                            node.short(f.node()),
                            f.description().split('\n',1)[0]))
        editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
        # figure out the lifetime of a line, this is relatively inefficient,
        # but probably fine
        lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
        for i, f in visiblefctxs:
            self.linelog.annotate((i + 1) * 2)
            for l in self.linelog.annotateresult:
                lineset[l].add(i)
        # append lines
        for l in alllines:
            editortext += ('    %s : %s' %
                           (''.join([('y' if i in lineset[l] else ' ')
                                     for i, _f in visiblefctxs]),
                            self._getline(l)))
        # run editor
        editedtext = self.ui.edit(editortext, '', action='absorb')
        if not editedtext:
            raise error.Abort(_('empty editor text'))
        # parse edited result
        contents = ['' for i in self.fctxs]
        leftpadpos = 4
        colonpos = leftpadpos + len(visiblefctxs) + 1
        for l in mdiff.splitnewlines(editedtext):
            if l.startswith('HG:'):
                continue
            if l[colonpos - 1:colonpos + 2] != ' : ':
                raise error.Abort(_('malformed line: %s') % l)
            linecontent = l[colonpos + 2:]
            for i, ch in enumerate(
                    pycompat.bytestr(l[leftpadpos:colonpos - 1])):
                if ch == 'y':
                    contents[visiblefctxs[i][0]] += linecontent
        # chunkstats is hard to calculate if anything changes, therefore
        # set them to just a simple value (1, 1).
        if editedtext != editortext:
            self.chunkstats = [1, 1]
        return contents

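For reference, a hand-written approximation of the editor text built above for a two-changeset stack (the hashes, descriptions and file lines are made up); deleting a "y" drops that line from the corresponding changeset, which is what the parsing loop at the end turns back into per-changeset contents::

    sample_editortext = (
        'HG: editing a.txt\n'
        'HG: "y" means the line to the right exists in the changeset to the top\n'
        'HG:\n'
        'HG: /--- 4f193ab7a1b4 commit one\n'
        'HG: |/-- 1c4b2d3e5f6a commit two\n'
        'HG: ||\n'
        '    yy : line present in both changesets\n'
        '     y : line introduced by commit two\n'
    )
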
    def _getline(self, lineinfo):
        """((rev, linenum)) -> str. convert rev+line number to line content"""
        rev, linenum = lineinfo
        if rev & 1: # odd: original line taken from fctxs
            return self.contentlines[rev // 2][linenum]
        else: # even: fixup line from targetfctx
            return self.targetlines[linenum]

    def _iscontinuous(self, a1, a2, closedinterval=False):
        """(a1, a2 : int) -> bool

        check if these lines are continuous. i.e. no other insertions or
        deletions (from other revisions) among these lines.

        closedinterval decides whether a2 should be included or not. i.e. is
        it [a1, a2), or [a1, a2] ?
        """
        if a1 >= a2:
            return True
        llog = self.linelog
        offset1 = llog.getoffset(a1)
        offset2 = llog.getoffset(a2) + int(closedinterval)
        linesinbetween = llog.getalllines(offset1, offset2)
        return len(linesinbetween) == a2 - a1 + int(closedinterval)

    def _optimizefixups(self, fixups):
        """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
        merge adjacent fixups to make them less fragmented.
        """
        result = []
        pcurrentchunk = [[-1, -1, -1, -1, -1]]

        def pushchunk():
            if pcurrentchunk[0][0] != -1:
                result.append(tuple(pcurrentchunk[0]))

        for i, chunk in enumerate(fixups):
            rev, a1, a2, b1, b2 = chunk
            lastrev = pcurrentchunk[0][0]
            lasta2 = pcurrentchunk[0][2]
            lastb2 = pcurrentchunk[0][4]
            if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
                    self._iscontinuous(max(a1 - 1, 0), a1)):
                # merge into currentchunk
                pcurrentchunk[0][2] = a2
                pcurrentchunk[0][4] = b2
            else:
                pushchunk()
                pcurrentchunk[0] = list(chunk)
        pushchunk()
        return result

    def _showchanges(self, fm, alines, blines, chunk, fixups):

        def trim(line):
            if line.endswith('\n'):
                line = line[:-1]
            return line

        # this is not optimized for perf but _showchanges only gets executed
        # with an extra command-line flag.
        a1, a2, b1, b2 = chunk
        aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
        for idx, fa1, fa2, fb1, fb2 in fixups:
            for i in pycompat.xrange(fa1, fa2):
                aidxs[i - a1] = (max(idx, 1) - 1) // 2
            for i in pycompat.xrange(fb1, fb2):
                bidxs[i - b1] = (max(idx, 1) - 1) // 2

        fm.startitem()
        fm.write('hunk', ' %s\n',
                 '@@ -%d,%d +%d,%d @@'
                 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
        fm.data(path=self.path, linetype='hunk')

        def writeline(idx, diffchar, line, linetype, linelabel):
            fm.startitem()
            node = ''
            if idx:
                ctx = self.fctxs[idx]
                fm.context(fctx=ctx)
                node = ctx.hex()
                self.ctxaffected.add(ctx.changectx())
            fm.write('node', '%-7.7s ', node, label='absorb.node')
            fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
                     label=linelabel)
            fm.data(path=self.path, linetype=linetype)

        for i in pycompat.xrange(a1, a2):
            writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
                      'diff.deleted')
        for i in pycompat.xrange(b1, b2):
            writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
                      'diff.inserted')

class fixupstate(object):
    """state needed to run absorb

    internally, it keeps paths and filefixupstates.

    a typical use is like filefixupstates:

        1. call diffwith, to calculate fixups
        2. (optionally), present fixups to the user, or edit fixups
        3. call apply, to apply changes to memory
        4. call commit, to commit changes to hg database
    """

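A condensed sketch of that four-step sequence (hedged: `repo` and `ui` are placeholders, and the real command-line driver adds option handling and empty-stack checks on top of this)::

    stack = getdraftstack(repo['.'], ui.configint('absorb', 'max-stack-size'))
    state = fixupstate(stack, ui=ui, opts={})
    state.diffwith(repo[None])    # compare against the working copy context
    state.apply()                 # rewrite file contents in memory
    state.commit()                # create new changesets, move bookmarks
    state.printchunkstats()
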
609 def __init__(self, stack, ui=None, opts=None):
609 def __init__(self, stack, ui=None, opts=None):
610 """([ctx], ui or None) -> None
610 """([ctx], ui or None) -> None
611
611
612 stack: should be linear, and sorted by topo order - oldest first.
612 stack: should be linear, and sorted by topo order - oldest first.
613 all commits in stack are considered mutable.
613 all commits in stack are considered mutable.
614 """
614 """
615 assert stack
615 assert stack
616 self.ui = ui or nullui()
616 self.ui = ui or nullui()
617 self.opts = opts or {}
617 self.opts = opts or {}
618 self.stack = stack
618 self.stack = stack
619 self.repo = stack[-1].repo().unfiltered()
619 self.repo = stack[-1].repo().unfiltered()
620
620
621 # following fields will be filled later
621 # following fields will be filled later
622 self.paths = [] # [str]
622 self.paths = [] # [str]
623 self.status = None # ctx.status output
623 self.status = None # ctx.status output
624 self.fctxmap = {} # {path: {ctx: fctx}}
624 self.fctxmap = {} # {path: {ctx: fctx}}
625 self.fixupmap = {} # {path: filefixupstate}
625 self.fixupmap = {} # {path: filefixupstate}
626 self.replacemap = {} # {oldnode: newnode or None}
626 self.replacemap = {} # {oldnode: newnode or None}
627 self.finalnode = None # head after all fixups
627 self.finalnode = None # head after all fixups
628 self.ctxaffected = set() # ctx that will be absorbed into
628 self.ctxaffected = set() # ctx that will be absorbed into
629
629
630 def diffwith(self, targetctx, match=None, fm=None):
630 def diffwith(self, targetctx, match=None, fm=None):
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
632 # only care about modified files
632 # only care about modified files
633 self.status = self.stack[-1].status(targetctx, match)
633 self.status = self.stack[-1].status(targetctx, match)
634 self.paths = []
634 self.paths = []
635 # but if --edit-lines is used, the user may want to edit files
635 # but if --edit-lines is used, the user may want to edit files
636 # even if they are not modified
636 # even if they are not modified
637 editopt = self.opts.get('edit_lines')
637 editopt = self.opts.get('edit_lines')
638 if not self.status.modified and editopt and match:
638 if not self.status.modified and editopt and match:
639 interestingpaths = match.files()
639 interestingpaths = match.files()
640 else:
640 else:
641 interestingpaths = self.status.modified
641 interestingpaths = self.status.modified
642 # prepare the filefixupstate
642 # prepare the filefixupstate
643 seenfctxs = set()
643 seenfctxs = set()
644 # sorting is necessary to eliminate ambiguity for the "double move"
644 # sorting is necessary to eliminate ambiguity for the "double move"
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
646 for path in sorted(interestingpaths):
646 for path in sorted(interestingpaths):
647 self.ui.debug('calculating fixups for %s\n' % path)
647 self.ui.debug('calculating fixups for %s\n' % path)
648 targetfctx = targetctx[path]
648 targetfctx = targetctx[path]
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
650 # ignore symbolic links or binary, or unchanged files
650 # ignore symbolic links or binary, or unchanged files
651 if any(f.islink() or stringutil.binary(f.data())
651 if any(f.islink() or stringutil.binary(f.data())
652 for f in [targetfctx] + fctxs
652 for f in [targetfctx] + fctxs
653 if not isinstance(f, emptyfilecontext)):
653 if not isinstance(f, emptyfilecontext)):
654 continue
654 continue
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
656 continue
656 continue
657 seenfctxs.update(fctxs[1:])
657 seenfctxs.update(fctxs[1:])
658 self.fctxmap[path] = ctx2fctx
658 self.fctxmap[path] = ctx2fctx
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
660 if fm is not None:
660 if fm is not None:
661 fm.startitem()
661 fm.startitem()
662 fm.plain('showing changes for ')
662 fm.plain('showing changes for ')
663 fm.write('path', '%s\n', path, label='absorb.path')
663 fm.write('path', '%s\n', path, label='absorb.path')
664 fm.data(linetype='path')
664 fm.data(linetype='path')
665 fstate.diffwith(targetfctx, fm)
665 fstate.diffwith(targetfctx, fm)
666 self.fixupmap[path] = fstate
666 self.fixupmap[path] = fstate
667 self.paths.append(path)
667 self.paths.append(path)
668 self.ctxaffected.update(fstate.ctxaffected)
668 self.ctxaffected.update(fstate.ctxaffected)
669
669
670 def apply(self):
670 def apply(self):
671 """apply fixups to individual filefixupstates"""
671 """apply fixups to individual filefixupstates"""
672 for path, state in self.fixupmap.iteritems():
672 for path, state in self.fixupmap.iteritems():
673 if self.ui.debugflag:
673 if self.ui.debugflag:
674 self.ui.write(_('applying fixups to %s\n') % path)
674 self.ui.write(_('applying fixups to %s\n') % path)
675 state.apply()
675 state.apply()
676
676
677 @property
677 @property
678 def chunkstats(self):
678 def chunkstats(self):
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
680 return dict((path, state.chunkstats)
680 return dict((path, state.chunkstats)
681 for path, state in self.fixupmap.iteritems())
681 for path, state in self.fixupmap.iteritems())
682
682
683 def commit(self):
683 def commit(self):
684 """commit changes. update self.finalnode, self.replacemap"""
684 """commit changes. update self.finalnode, self.replacemap"""
685 with self.repo.transaction('absorb') as tr:
685 with self.repo.transaction('absorb') as tr:
686 self._commitstack()
686 self._commitstack()
687 self._movebookmarks(tr)
687 self._movebookmarks(tr)
688 if self.repo['.'].node() in self.replacemap:
688 if self.repo['.'].node() in self.replacemap:
689 self._moveworkingdirectoryparent()
689 self._moveworkingdirectoryparent()
690 self._cleanupoldcommits()
690 self._cleanupoldcommits()
691 return self.finalnode
691 return self.finalnode
692
692
693 def printchunkstats(self):
693 def printchunkstats(self):
694 """print things like '1 of 2 chunk(s) applied'"""
694 """print things like '1 of 2 chunk(s) applied'"""
695 ui = self.ui
695 ui = self.ui
696 chunkstats = self.chunkstats
696 chunkstats = self.chunkstats
697 if ui.verbose:
697 if ui.verbose:
698 # chunkstats for each file
698 # chunkstats for each file
699 for path, stat in chunkstats.iteritems():
699 for path, stat in chunkstats.iteritems():
700 if stat[0]:
700 if stat[0]:
701 ui.write(_('%s: %d of %d chunk(s) applied\n')
701 ui.write(_('%s: %d of %d chunk(s) applied\n')
702 % (path, stat[0], stat[1]))
702 % (path, stat[0], stat[1]))
703 elif not ui.quiet:
703 elif not ui.quiet:
704 # a summary for all files
704 # a summary for all files
705 stats = chunkstats.values()
705 stats = chunkstats.values()
706 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
706 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
707 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
707 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
708
708
709 def _commitstack(self):
709 def _commitstack(self):
710 """make new commits. update self.finalnode, self.replacemap.
710 """make new commits. update self.finalnode, self.replacemap.
711 it is splitted from "commit" to avoid too much indentation.
711 it is splitted from "commit" to avoid too much indentation.
712 """
712 """
713 # last node (20-char) committed by us
713 # last node (20-char) committed by us
714 lastcommitted = None
714 lastcommitted = None
715 # p1 which overrides the parent of the next commit, "None" means use
715 # p1 which overrides the parent of the next commit, "None" means use
716 # the original parent unchanged
716 # the original parent unchanged
717 nextp1 = None
717 nextp1 = None
718 for ctx in self.stack:
718 for ctx in self.stack:
719 memworkingcopy = self._getnewfilecontents(ctx)
719 memworkingcopy = self._getnewfilecontents(ctx)
720 if not memworkingcopy and not lastcommitted:
720 if not memworkingcopy and not lastcommitted:
721 # nothing changed, nothing commited
721 # nothing changed, nothing commited
722 nextp1 = ctx
722 nextp1 = ctx
723 continue
723 continue
724 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
724 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
725 # changeset is no longer necessary
725 # changeset is no longer necessary
726 self.replacemap[ctx.node()] = None
726 self.replacemap[ctx.node()] = None
727 msg = _('became empty and was dropped')
727 msg = _('became empty and was dropped')
728 else:
728 else:
729 # changeset needs re-commit
729 # changeset needs re-commit
730 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
730 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
731 lastcommitted = self.repo[nodestr]
731 lastcommitted = self.repo[nodestr]
732 nextp1 = lastcommitted
732 nextp1 = lastcommitted
733 self.replacemap[ctx.node()] = lastcommitted.node()
733 self.replacemap[ctx.node()] = lastcommitted.node()
734 if memworkingcopy:
734 if memworkingcopy:
735 msg = _('%d file(s) changed, became %s') % (
735 msg = _('%d file(s) changed, became %s') % (
736 len(memworkingcopy), self._ctx2str(lastcommitted))
736 len(memworkingcopy), self._ctx2str(lastcommitted))
737 else:
737 else:
738 msg = _('became %s') % self._ctx2str(lastcommitted)
738 msg = _('became %s') % self._ctx2str(lastcommitted)
739 if self.ui.verbose and msg:
739 if self.ui.verbose and msg:
740 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
740 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
741 self.finalnode = lastcommitted and lastcommitted.node()
741 self.finalnode = lastcommitted and lastcommitted.node()
742
742
743 def _ctx2str(self, ctx):
743 def _ctx2str(self, ctx):
744 if self.ui.debugflag:
744 if self.ui.debugflag:
745 return '%d:%s' % (ctx.rev(), ctx.hex())
745 return '%d:%s' % (ctx.rev(), ctx.hex())
746 else:
746 else:
747 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
747 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
748
748
749 def _getnewfilecontents(self, ctx):
749 def _getnewfilecontents(self, ctx):
750 """(ctx) -> {path: str}
750 """(ctx) -> {path: str}
751
751
752 fetch file contents from filefixupstates.
752 fetch file contents from filefixupstates.
753 return the working copy overrides - files different from ctx.
753 return the working copy overrides - files different from ctx.
754 """
754 """
755 result = {}
755 result = {}
756 for path in self.paths:
756 for path in self.paths:
757 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
757 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
758 if ctx not in ctx2fctx:
758 if ctx not in ctx2fctx:
759 continue
759 continue
760 fctx = ctx2fctx[ctx]
760 fctx = ctx2fctx[ctx]
761 content = fctx.data()
761 content = fctx.data()
762 newcontent = self.fixupmap[path].getfinalcontent(fctx)
762 newcontent = self.fixupmap[path].getfinalcontent(fctx)
763 if content != newcontent:
763 if content != newcontent:
764 result[fctx.path()] = newcontent
764 result[fctx.path()] = newcontent
765 return result
765 return result
766
766
767 def _movebookmarks(self, tr):
767 def _movebookmarks(self, tr):
768 repo = self.repo
768 repo = self.repo
769 needupdate = [(name, self.replacemap[hsh])
769 needupdate = [(name, self.replacemap[hsh])
770 for name, hsh in repo._bookmarks.iteritems()
770 for name, hsh in repo._bookmarks.iteritems()
771 if hsh in self.replacemap]
771 if hsh in self.replacemap]
772 changes = []
772 changes = []
773 for name, hsh in needupdate:
773 for name, hsh in needupdate:
774 if hsh:
774 if hsh:
775 changes.append((name, hsh))
775 changes.append((name, hsh))
776 if self.ui.verbose:
776 if self.ui.verbose:
777 self.ui.write(_('moving bookmark %s to %s\n')
777 self.ui.write(_('moving bookmark %s to %s\n')
778 % (name, node.hex(hsh)))
778 % (name, node.hex(hsh)))
779 else:
779 else:
780 changes.append((name, None))
780 changes.append((name, None))
781 if self.ui.verbose:
781 if self.ui.verbose:
782 self.ui.write(_('deleting bookmark %s\n') % name)
782 self.ui.write(_('deleting bookmark %s\n') % name)
783 repo._bookmarks.applychanges(repo, tr, changes)
783 repo._bookmarks.applychanges(repo, tr, changes)
784
784
785 def _moveworkingdirectoryparent(self):
785 def _moveworkingdirectoryparent(self):
786 if not self.finalnode:
786 if not self.finalnode:
787 # Find the latest not-{obsoleted,stripped} parent.
787 # Find the latest not-{obsoleted,stripped} parent.
788 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
788 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
789 ctx = self.repo[revs.first()]
789 ctx = self.repo[revs.first()]
790 self.finalnode = ctx.node()
790 self.finalnode = ctx.node()
791 else:
791 else:
792 ctx = self.repo[self.finalnode]
792 ctx = self.repo[self.finalnode]
793
793
794 dirstate = self.repo.dirstate
794 dirstate = self.repo.dirstate
795 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
795 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
796 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
796 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
797 noop = lambda: 0
797 noop = lambda: 0
798 restore = noop
798 restore = noop
799 if util.safehasattr(dirstate, '_fsmonitorstate'):
799 if util.safehasattr(dirstate, '_fsmonitorstate'):
800 bak = dirstate._fsmonitorstate.invalidate
800 bak = dirstate._fsmonitorstate.invalidate
801 def restore():
801 def restore():
802 dirstate._fsmonitorstate.invalidate = bak
802 dirstate._fsmonitorstate.invalidate = bak
803 dirstate._fsmonitorstate.invalidate = noop
803 dirstate._fsmonitorstate.invalidate = noop
804 try:
804 try:
805 with dirstate.parentchange():
805 with dirstate.parentchange():
806 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
806 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
807 finally:
807 finally:
808 restore()
808 restore()
809
809
810 @staticmethod
810 @staticmethod
811 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
811 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
812 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
812 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
813
813
814 if it will become an empty commit (does not change anything, after the
814 if it will become an empty commit (does not change anything, after the
815 memworkingcopy overrides), return True. otherwise return False.
815 memworkingcopy overrides), return True. otherwise return False.
816 """
816 """
817 if not pctx:
817 if not pctx:
818 parents = ctx.parents()
818 parents = ctx.parents()
819 if len(parents) != 1:
819 if len(parents) != 1:
820 return False
820 return False
821 pctx = parents[0]
821 pctx = parents[0]
822 # ctx changes more files (not a subset of memworkingcopy)
822 # ctx changes more files (not a subset of memworkingcopy)
823 if not set(ctx.files()).issubset(set(memworkingcopy)):
823 if not set(ctx.files()).issubset(set(memworkingcopy)):
824 return False
824 return False
825 for path, content in memworkingcopy.iteritems():
825 for path, content in memworkingcopy.iteritems():
826 if path not in pctx or path not in ctx:
826 if path not in pctx or path not in ctx:
827 return False
827 return False
828 fctx = ctx[path]
828 fctx = ctx[path]
829 pfctx = pctx[path]
829 pfctx = pctx[path]
830 if pfctx.flags() != fctx.flags():
830 if pfctx.flags() != fctx.flags():
831 return False
831 return False
832 if pfctx.data() != content:
832 if pfctx.data() != content:
833 return False
833 return False
834 return True
834 return True
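# Illustrative sketch only (not part of absorb): the same "would this commit
# become empty?" test expressed over plain dicts. All names below are
# hypothetical, and the flags() comparison done above is omitted for brevity.
def would_become_noop(overrides, ctx_files, parent_contents):
    # the commit must not touch any file outside the overrides...
    if not set(ctx_files).issubset(set(overrides)):
        return False
    # ...and every override must restore the parent's content exactly
    return all(parent_contents.get(path) == content
               for path, content in overrides.items())

# e.g. would_become_noop({'a.txt': b'old\n'}, ['a.txt'], {'a.txt': b'old\n'})
# returns True: the override reverts a.txt back to its state in the parent.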
835
835
836 def _commitsingle(self, memworkingcopy, ctx, p1=None):
836 def _commitsingle(self, memworkingcopy, ctx, p1=None):
837 """(ctx, {path: content}, node) -> node. make a single commit
837 """(ctx, {path: content}, node) -> node. make a single commit
838
838
839 the commit is a clone from ctx, with a (optionally) different p1, and
839 the commit is a clone from ctx, with a (optionally) different p1, and
840 different file contents replaced by memworkingcopy.
840 different file contents replaced by memworkingcopy.
841 """
841 """
842 parents = p1 and (p1, node.nullid)
842 parents = p1 and (p1, node.nullid)
843 extra = ctx.extra()
843 extra = ctx.extra()
844 if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
844 if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
845 extra['absorb_source'] = ctx.hex()
845 extra['absorb_source'] = ctx.hex()
846 mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
846 mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
847 return mctx.commit()
847 return mctx.commit()
848
848
849 @util.propertycache
849 @util.propertycache
850 def _useobsolete(self):
850 def _useobsolete(self):
851 """() -> bool"""
851 """() -> bool"""
852 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
852 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
853
853
854 def _cleanupoldcommits(self):
854 def _cleanupoldcommits(self):
855 replacements = {k: ([v] if v is not None else [])
855 replacements = {k: ([v] if v is not None else [])
856 for k, v in self.replacemap.iteritems()}
856 for k, v in self.replacemap.iteritems()}
857 if replacements:
857 if replacements:
858 scmutil.cleanupnodes(self.repo, replacements, operation='absorb',
858 scmutil.cleanupnodes(self.repo, replacements, operation='absorb',
859 fixphase=True)
859 fixphase=True)
860
860
861 def _parsechunk(hunk):
861 def _parsechunk(hunk):
862 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
862 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
863 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
863 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
864 return None, None
864 return None, None
865 path = hunk.header.filename()
865 path = hunk.header.filename()
866 a1 = hunk.fromline + len(hunk.before) - 1
866 a1 = hunk.fromline + len(hunk.before) - 1
867 # remove before and after context
867 # remove before and after context
868 hunk.before = hunk.after = []
868 hunk.before = hunk.after = []
869 buf = util.stringio()
869 buf = util.stringio()
870 hunk.write(buf)
870 hunk.write(buf)
871 patchlines = mdiff.splitnewlines(buf.getvalue())
871 patchlines = mdiff.splitnewlines(buf.getvalue())
872 # hunk.prettystr() will update hunk.removed
872 # hunk.prettystr() will update hunk.removed
873 a2 = a1 + hunk.removed
873 a2 = a1 + hunk.removed
874 blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
874 blines = [l[1:] for l in patchlines[1:] if not l.startswith('-')]
875 return path, (a1, a2, blines)
875 return path, (a1, a2, blines)
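# Coordinate sanity check with hypothetical numbers: for a hunk whose
# fromline is 3, with one line of leading context and two removed lines,
#   a1 = fromline + len(before) - 1 = 3 + 1 - 1 = 3   (0-based start)
#   a2 = a1 + removed = 3 + 2 = 5                     (end-exclusive)
# so the caller splices the hunk's added lines over lines[3:5] of the old
# file, and blines holds exactly those added lines.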
876
876
877 def overlaydiffcontext(ctx, chunks):
877 def overlaydiffcontext(ctx, chunks):
878 """(ctx, [crecord.uihunk]) -> memctx
878 """(ctx, [crecord.uihunk]) -> memctx
879
879
880 return a memctx with some [1] patches (chunks) applied to ctx.
880 return a memctx with some [1] patches (chunks) applied to ctx.
881 [1]: modifications are handled. renames, mode changes, etc. are ignored.
881 [1]: modifications are handled. renames, mode changes, etc. are ignored.
882 """
882 """
883 # sadly the applying-patch logic is hardly reusable, and messy:
883 # sadly the applying-patch logic is hardly reusable, and messy:
884 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
884 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
885 # needs a file stream of a patch and will re-parse it, while we have
885 # needs a file stream of a patch and will re-parse it, while we have
886 # structured hunk objects at hand.
886 # structured hunk objects at hand.
887 # 2. a lot of different implementations of "chunk" (patch.hunk,
887 # 2. a lot of different implementations of "chunk" (patch.hunk,
888 # patch.recordhunk, crecord.uihunk)
888 # patch.recordhunk, crecord.uihunk)
889 # as we only care about applying changes to modified files, no mode
889 # as we only care about applying changes to modified files, no mode
890 # change, no binary diff, and no renames, it's probably okay to
890 # change, no binary diff, and no renames, it's probably okay to
891 # re-invent the logic using much simpler code here.
891 # re-invent the logic using much simpler code here.
892 memworkingcopy = {} # {path: content}
892 memworkingcopy = {} # {path: content}
893 patchmap = collections.defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
893 patchmap = collections.defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
894 for path, info in map(_parsechunk, chunks):
894 for path, info in map(_parsechunk, chunks):
895 if not path or not info:
895 if not path or not info:
896 continue
896 continue
897 patchmap[path].append(info)
897 patchmap[path].append(info)
898 for path, patches in patchmap.iteritems():
898 for path, patches in patchmap.iteritems():
899 if path not in ctx or not patches:
899 if path not in ctx or not patches:
900 continue
900 continue
901 patches.sort(reverse=True)
901 patches.sort(reverse=True)
902 lines = mdiff.splitnewlines(ctx[path].data())
902 lines = mdiff.splitnewlines(ctx[path].data())
903 for a1, a2, blines in patches:
903 for a1, a2, blines in patches:
904 lines[a1:a2] = blines
904 lines[a1:a2] = blines
905 memworkingcopy[path] = ''.join(lines)
905 memworkingcopy[path] = ''.join(lines)
906 return overlaycontext(memworkingcopy, ctx)
906 return overlaycontext(memworkingcopy, ctx)
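# A standalone sketch (an assumed simplification, not an API of this
# extension) of the line-splicing approach used above: hunks are applied
# bottom-up so that ranges computed against the old file remain valid.
def apply_line_hunks(lines, hunks):
    """lines: [bline]; hunks: [(a1, a2, [bline])], 0-based end-exclusive"""
    for a1, a2, blines in sorted(hunks, reverse=True):
        lines[a1:a2] = blines
    return b''.join(lines)

# e.g. apply_line_hunks([b'a\n', b'b\n', b'c\n'], [(1, 2, [b'B\n', b'B2\n'])])
# returns b'a\nB\nB2\nc\n'.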
907
907
908 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
908 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
909 """pick fixup chunks from targetctx, apply them to stack.
909 """pick fixup chunks from targetctx, apply them to stack.
910
910
911 if targetctx is None, the working copy context will be used.
911 if targetctx is None, the working copy context will be used.
912 if stack is None, the current draft stack will be used.
912 if stack is None, the current draft stack will be used.
913 return fixupstate.
913 return fixupstate.
914 """
914 """
915 if stack is None:
915 if stack is None:
916 limit = ui.configint('absorb', 'max-stack-size')
916 limit = ui.configint('absorb', 'max-stack-size')
917 headctx = repo['.']
917 headctx = repo['.']
918 if len(headctx.parents()) > 1:
918 if len(headctx.parents()) > 1:
919 raise error.Abort(_('cannot absorb into a merge'))
919 raise error.Abort(_('cannot absorb into a merge'))
920 stack = getdraftstack(headctx, limit)
920 stack = getdraftstack(headctx, limit)
921 if limit and len(stack) >= limit:
921 if limit and len(stack) >= limit:
922 ui.warn(_('absorb: only the %d most recent changesets will '
922 ui.warn(_('absorb: only the %d most recent changesets will '
923 'be analysed\n')
923 'be analysed\n')
924 % limit)
924 % limit)
925 if not stack:
925 if not stack:
926 raise error.Abort(_('no mutable changeset to change'))
926 raise error.Abort(_('no mutable changeset to change'))
927 if targetctx is None: # default to working copy
927 if targetctx is None: # default to working copy
928 targetctx = repo[None]
928 targetctx = repo[None]
929 if pats is None:
929 if pats is None:
930 pats = ()
930 pats = ()
931 if opts is None:
931 if opts is None:
932 opts = {}
932 opts = {}
933 state = fixupstate(stack, ui=ui, opts=opts)
933 state = fixupstate(stack, ui=ui, opts=opts)
934 matcher = scmutil.match(targetctx, pats, opts)
934 matcher = scmutil.match(targetctx, pats, opts)
935 if opts.get('interactive'):
935 if opts.get('interactive'):
936 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
936 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
937 origchunks = patch.parsepatch(diff)
937 origchunks = patch.parsepatch(diff)
938 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
938 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
939 targetctx = overlaydiffcontext(stack[-1], chunks)
939 targetctx = overlaydiffcontext(stack[-1], chunks)
940 fm = None
940 fm = None
941 if opts.get('print_changes') or not opts.get('apply_changes'):
941 if opts.get('print_changes') or not opts.get('apply_changes'):
942 fm = ui.formatter('absorb', opts)
942 fm = ui.formatter('absorb', opts)
943 state.diffwith(targetctx, matcher, fm)
943 state.diffwith(targetctx, matcher, fm)
944 if fm is not None:
944 if fm is not None:
945 fm.startitem()
945 fm.startitem()
946 fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
946 fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
947 fm.data(linetype='summary')
947 fm.data(linetype='summary')
948 for ctx in reversed(stack):
948 for ctx in reversed(stack):
949 if ctx not in state.ctxaffected:
949 if ctx not in state.ctxaffected:
950 continue
950 continue
951 fm.startitem()
951 fm.startitem()
952 fm.context(ctx=ctx)
952 fm.context(ctx=ctx)
953 fm.data(linetype='changeset')
953 fm.data(linetype='changeset')
954 fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
954 fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
955 descfirstline = ctx.description().splitlines()[0]
955 descfirstline = ctx.description().splitlines()[0]
956 fm.write('descfirstline', '%s\n', descfirstline,
956 fm.write('descfirstline', '%s\n', descfirstline,
957 label='absorb.description')
957 label='absorb.description')
958 fm.end()
958 fm.end()
959 if not opts.get('dry_run'):
959 if not opts.get('dry_run'):
960 if (not opts.get('apply_changes') and
960 if (not opts.get('apply_changes') and
961 state.ctxaffected and
961 state.ctxaffected and
962 ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
962 ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
963 raise error.Abort(_('absorb cancelled'))
963 raise error.Abort(_('absorb cancelled'))
964
964
965 state.apply()
965 state.apply()
966 if state.commit():
966 if state.commit():
967 state.printchunkstats()
967 state.printchunkstats()
968 elif not ui.quiet:
968 elif not ui.quiet:
969 ui.write(_('nothing applied\n'))
969 ui.write(_('nothing applied\n'))
970 return state
970 return state
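# Hypothetical programmatic use from another extension. The option keys are
# the ones read via opts.get() above; the caller's ui/repo objects and the
# file name are assumptions, not something absorb documents:
#
#   state = absorb(ui, repo, pats=('hgext/somefile.py',),
#                  opts={'apply_changes': True})
#   if sum(s[0] for s in state.chunkstats.values()) == 0:
#       ui.write('nothing was absorbed\n')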
971
971
972 @command('absorb',
972 @command('absorb',
973 [('a', 'apply-changes', None,
973 [('a', 'apply-changes', None,
974 _('apply changes without prompting for confirmation')),
974 _('apply changes without prompting for confirmation')),
975 ('p', 'print-changes', None,
975 ('p', 'print-changes', None,
976 _('always print which changesets are modified by which changes')),
976 _('always print which changesets are modified by which changes')),
977 ('i', 'interactive', None,
977 ('i', 'interactive', None,
978 _('interactively select which chunks to apply (EXPERIMENTAL)')),
978 _('interactively select which chunks to apply (EXPERIMENTAL)')),
979 ('e', 'edit-lines', None,
979 ('e', 'edit-lines', None,
980 _('edit what lines belong to which changesets before commit '
980 _('edit what lines belong to which changesets before commit '
981 '(EXPERIMENTAL)')),
981 '(EXPERIMENTAL)')),
982 ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
982 ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
983 _('hg absorb [OPTION] [FILE]...'),
983 _('hg absorb [OPTION] [FILE]...'),
984 helpcategory=command.CATEGORY_COMMITTING,
984 helpcategory=command.CATEGORY_COMMITTING,
985 helpbasic=True)
985 helpbasic=True)
986 def absorbcmd(ui, repo, *pats, **opts):
986 def absorbcmd(ui, repo, *pats, **opts):
987 """incorporate corrections into the stack of draft changesets
987 """incorporate corrections into the stack of draft changesets
988
988
989 absorb analyzes each change in your working directory and attempts to
989 absorb analyzes each change in your working directory and attempts to
990 amend the changed lines into the changesets in your stack that first
990 amend the changed lines into the changesets in your stack that first
991 introduced those lines.
991 introduced those lines.
992
992
993 If absorb cannot find an unambiguous changeset to amend for a change,
993 If absorb cannot find an unambiguous changeset to amend for a change,
994 that change will be left in the working directory, untouched. It can be
994 that change will be left in the working directory, untouched. It can be
995 observed by :hg:`status` or :hg:`diff` afterwards. In other words,
995 observed by :hg:`status` or :hg:`diff` afterwards. In other words,
996 absorb does not write to the working directory.
996 absorb does not write to the working directory.
997
997
998 Changesets outside the revset `::. and not public() and not merge()` will
998 Changesets outside the revset `::. and not public() and not merge()` will
999 not be changed.
999 not be changed.
1000
1000
1001 Changesets that become empty after applying the changes will be deleted.
1001 Changesets that become empty after applying the changes will be deleted.
1002
1002
1003 By default, absorb will show what it plans to do and prompt for
1003 By default, absorb will show what it plans to do and prompt for
1004 confirmation. If you are confident that the changes will be absorbed
1004 confirmation. If you are confident that the changes will be absorbed
1005 to the correct place, run :hg:`absorb -a` to apply the changes
1005 to the correct place, run :hg:`absorb -a` to apply the changes
1006 immediately.
1006 immediately.
1007
1007
1008 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1008 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1009 """
1009 """
1010 opts = pycompat.byteskwargs(opts)
1010 opts = pycompat.byteskwargs(opts)
1011
1011
1012 with repo.wlock(), repo.lock():
1012 with repo.wlock(), repo.lock():
1013 if not opts['dry_run']:
1013 if not opts['dry_run']:
1014 cmdutil.checkunfinished(repo)
1014 cmdutil.checkunfinished(repo)
1015
1015
1016 state = absorb(ui, repo, pats=pats, opts=opts)
1016 state = absorb(ui, repo, pats=pats, opts=opts)
1017 if sum(s[0] for s in state.chunkstats.values()) == 0:
1017 if sum(s[0] for s in state.chunkstats.values()) == 0:
1018 return 1
1018 return 1