absorb: update help text...
Mark Thomas
r40246:8f192f2c default
# absorb.py
#
# Copyright 2016 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""apply working directory changes to changesets (EXPERIMENTAL)

The absorb extension provides a command to use annotate information to
amend modified chunks into the corresponding non-public changesets.

::

    [absorb]
    # only check 50 recent non-public changesets at most
    max-stack-size = 50
    # whether to add noise to new commits to avoid obsolescence cycle
    add-noise = 1
    # make `amend --correlated` a shortcut to the main command
    amend-flag = correlated

    [color]
    absorb.description = yellow
    absorb.node = blue bold
    absorb.path = bold
"""

# TODO:
# * Rename config items to [commands] namespace
# * Converge getdraftstack() with other code in core
# * move many attributes on fixupstate to be private

from __future__ import absolute_import

import collections

from mercurial.i18n import _
from mercurial import (
    cmdutil,
    commands,
    context,
    crecord,
    error,
    linelog,
    mdiff,
    node,
    obsolete,
    patch,
    phases,
    pycompat,
    registrar,
    repair,
    scmutil,
    util,
)
from mercurial.utils import (
    stringutil,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

configitem('absorb', 'add-noise', default=True)
configitem('absorb', 'amend-flag', default=None)
configitem('absorb', 'max-stack-size', default=50)

colortable = {
    'absorb.description': 'yellow',
    'absorb.node': 'blue bold',
    'absorb.path': 'bold',
}

defaultdict = collections.defaultdict

class nullui(object):
    """blank ui object doing nothing"""
    debugflag = False
    verbose = False
    quiet = True

    def __getitem__(name):
        def nullfunc(*args, **kwds):
            return
        return nullfunc

class emptyfilecontext(object):
    """minimal filecontext representing an empty file"""
    def data(self):
        return ''

    def node(self):
        return node.nullid

def uniq(lst):
    """list -> list. remove duplicated items without changing the order"""
    seen = set()
    result = []
    for x in lst:
        if x not in seen:
            seen.add(x)
            result.append(x)
    return result
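
# Example (illustrative, not required by the code above): uniq() keeps the
# first occurrence of each item, so
#
#     uniq([2, 1, 2, 3, 1]) == [2, 1, 3]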

def getdraftstack(headctx, limit=None):
    """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.

    changesets are sorted in topo order, oldest first.
    return at most limit items, if limit is a positive number.

    merges are considered as non-draft as well. i.e. every commit
    returned has and only has 1 parent.
    """
    ctx = headctx
    result = []
    while ctx.phase() != phases.public:
        if limit and len(result) >= limit:
            break
        parents = ctx.parents()
        if len(parents) != 1:
            break
        result.append(ctx)
        ctx = parents[0]
    result.reverse()
    return result
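
# Example (illustrative sketch): given a repository object "repo"
# (hypothetical), the stack absorb works on is the linear chain of
# non-public ancestors of the working parent, oldest first:
#
#     stack = getdraftstack(repo['.'], limit=50)
#     # stack == [oldest draft, ..., repo['.']]; empty if '.' is public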

def getfilestack(stack, path, seenfctxs=None):
    """([ctx], str, set) -> [fctx], {ctx: fctx}

    stack is a list of contexts, from old to new. usually they are what
    "getdraftstack" returns.

    follows renames, but not copies.

    seenfctxs is a set of filecontexts that will be considered "immutable".
    they are usually what this function returned in earlier calls, useful
    to avoid issues that a file was "moved" to multiple places and was then
    modified differently, like: "a" was copied to "b", "a" was also copied to
    "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
    and we enforce only one of them to be able to affect "a"'s content.

    return an empty list and an empty dict, if the specified path does not
    exist in stack[-1] (the top of the stack).

    otherwise, return a list of de-duplicated filecontexts, and the map to
    convert ctx in the stack to fctx, for possible mutable fctxs. the first item
    of the list would be outside the stack and should be considered immutable.
    the remaining items are within the stack.

    for example, given the following changelog and corresponding filelog
    revisions:

      changelog: 3----4----5----6----7
      filelog:   x    0----1----1----2 (x: no such file yet)

    - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
    - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
      dummy empty filecontext.
    - if stack = [2], returns ([], {})
    - if stack = [7], returns ([1, 2], {7: 2})
    - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
      removed, since 1 is immutable.
    """
    if seenfctxs is None:
        seenfctxs = set()
    assert stack

    if path not in stack[-1]:
        return [], {}

    fctxs = []
    fctxmap = {}

    pctx = stack[0].p1() # the public (immutable) ctx we stop at
    for ctx in reversed(stack):
        if path not in ctx: # the file is added in the next commit
            pctx = ctx
            break
        fctx = ctx[path]
        fctxs.append(fctx)
        if fctx in seenfctxs: # treat fctx as the immutable one
            pctx = None # do not add another immutable fctx
            break
        fctxmap[ctx] = fctx # only for mutable fctxs
        renamed = fctx.renamed()
        if renamed:
            path = renamed[0] # follow rename
            if path in ctx: # but do not follow copy
                pctx = ctx.p1()
                break

    if pctx is not None: # need an extra immutable fctx
        if path in pctx:
            fctxs.append(pctx[path])
        else:
            fctxs.append(emptyfilecontext())

    fctxs.reverse()
    # note: we rely on a property of hg: filerev is not reused for linear
    # history. i.e. it's impossible to have:
    #   changelog: 4----5----6 (linear, no merges)
    #   filelog:   1----2----1
    #                        ^ reuse filerev (impossible)
    # because parents are part of the hash. if that's not true, we need to
    # remove uniq and find a different way to identify fctxs.
    return uniq(fctxs), fctxmap
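
# Example (illustrative sketch): with a stack from getdraftstack and a
# hypothetical path 'a.py', a caller would do:
#
#     fctxs, fctxmap = getfilestack(stack, 'a.py')
#     # fctxs[0] is immutable (outside the stack); fctxmap maps each ctx in
#     # the stack that may be rewritten to its filecontext for 'a.py'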

class overlaystore(patch.filestore):
    """read-only, hybrid store based on a dict and ctx.
    memworkingcopy: {path: content}, overrides file contents.
    """
    def __init__(self, basectx, memworkingcopy):
        self.basectx = basectx
        self.memworkingcopy = memworkingcopy

    def getfile(self, path):
        """comply with mercurial.patch.filestore.getfile"""
        if path not in self.basectx:
            return None, None, None
        fctx = self.basectx[path]
        if path in self.memworkingcopy:
            content = self.memworkingcopy[path]
        else:
            content = fctx.data()
        mode = (fctx.islink(), fctx.isexec())
        renamed = fctx.renamed() # False or (path, node)
        return content, mode, (renamed and renamed[0])

def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
    memworkingcopy overrides file contents.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    date = ctx.date()
    desc = ctx.description()
    user = ctx.user()
    files = set(ctx.files()).union(memworkingcopy)
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(), parents=parents, text=desc,
        files=files, filectxfn=store, user=user, date=date,
        branch=None, extra=extra)
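
# Example (illustrative sketch): to build an in-memory clone of a changeset
# "ctx" (hypothetical changectx) with one file's content replaced:
#
#     mctx = overlaycontext({'a.py': 'new content\n'}, ctx)
#     newnode = mctx.commit()  # writes the rewritten changeset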

class filefixupstate(object):
    """state needed to apply fixups to a single file

    internally, it keeps file contents of several revisions and a linelog.

    the linelog uses odd revision numbers for original contents (fctxs passed
    to __init__), and even revision numbers for fixups, like:

        linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
        linelog rev 2: fixups made to self.fctxs[0]
        linelog rev 3: self.fctxs[1] (a child of fctxs[0])
        linelog rev 4: fixups made to self.fctxs[1]
        ...

    a typical use is like:

    1. call diffwith, to calculate self.fixups
    2. (optionally), present self.fixups to the user, or change it
    3. call apply, to apply changes
    4. read results from "finalcontents", or call getfinalcontent
    """

    def __init__(self, fctxs, path, ui=None, opts=None):
        """([fctx], ui or None) -> None

        fctxs should be linear, and sorted by topo order - oldest first.
        fctxs[0] will be considered as "immutable" and will not be changed.
        """
        self.fctxs = fctxs
        self.path = path
        self.ui = ui or nullui()
        self.opts = opts or {}

        # following fields are built from fctxs. they exist for perf reason
        self.contents = [f.data() for f in fctxs]
        self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
        self.linelog = self._buildlinelog()
        if self.ui.debugflag:
            assert self._checkoutlinelog() == self.contents

        # following fields will be filled later
        self.chunkstats = [0, 0] # [adopted, total : int]
        self.targetlines = [] # [str]
        self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
        self.finalcontents = [] # [str]
        self.ctxaffected = set()

    def diffwith(self, targetfctx, fm=None):
        """calculate fixups needed by examining the differences between
        self.fctxs[-1] and targetfctx, chunk by chunk.

        targetfctx is the target state we move towards. we may or may not be
        able to get there because not all modified chunks can be amended into
        a non-public fctx unambiguously.

        call this only once, before apply().

        update self.fixups, self.chunkstats, and self.targetlines.
        """
        a = self.contents[-1]
        alines = self.contentlines[-1]
        b = targetfctx.data()
        blines = mdiff.splitnewlines(b)
        self.targetlines = blines

        self.linelog.annotate(self.linelog.maxrev)
        annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
        assert len(annotated) == len(alines)
        # add a dummy end line to make insertion at the end easier
        if annotated:
            dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
            annotated.append(dummyendline)

        # analyse diff blocks
        for chunk in self._alldiffchunks(a, b, alines, blines):
            newfixups = self._analysediffchunk(chunk, annotated)
            self.chunkstats[0] += bool(newfixups) # 1 or 0
            self.chunkstats[1] += 1
            self.fixups += newfixups
            if fm is not None:
                self._showchanges(fm, alines, blines, chunk, newfixups)

    def apply(self):
        """apply self.fixups. update self.linelog, self.finalcontents.

        call this only once, before getfinalcontent(), after diffwith().
        """
        # the following is unnecessary, as it's done by "diffwith":
        # self.linelog.annotate(self.linelog.maxrev)
        for rev, a1, a2, b1, b2 in reversed(self.fixups):
            blines = self.targetlines[b1:b2]
            if self.ui.debugflag:
                idx = (max(rev - 1, 0)) // 2
                self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
                              % (node.short(self.fctxs[idx].node()),
                                 a1, a2, len(blines)))
            self.linelog.replacelines(rev, a1, a2, b1, b2)
        if self.opts.get('edit_lines', False):
            self.finalcontents = self._checkoutlinelogwithedits()
        else:
            self.finalcontents = self._checkoutlinelog()

    def getfinalcontent(self, fctx):
        """(fctx) -> str. get modified file content for a given filecontext"""
        idx = self.fctxs.index(fctx)
        return self.finalcontents[idx]

    def _analysediffchunk(self, chunk, annotated):
        """analyse a different chunk and return new fixups found

        return [] if no lines from the chunk can be safely applied.

        the chunk (or lines) cannot be safely applied, if, for example:
        - the modified (deleted) lines belong to a public changeset
          (self.fctxs[0])
        - the chunk is a pure insertion and the adjacent lines (at most 2
          lines) belong to different non-public changesets, or do not belong
          to any non-public changesets.
        - the chunk is modifying lines from different changesets.
          in this case, if the number of lines deleted equals to the number
          of lines added, assume it's a simple 1:1 map (could be wrong).
          otherwise, give up.
        - the chunk is modifying lines from a single non-public changeset,
          but other revisions touch the area as well. i.e. the lines are
          not continuous as seen from the linelog.
        """
        a1, a2, b1, b2 = chunk
        # find involved indexes from annotate result
        involved = annotated[a1:a2]
        if not involved and annotated: # a1 == a2 and a is not empty
            # pure insertion, check nearby lines. ignore lines belong
            # to the public (first) changeset (i.e. annotated[i][0] == 1)
            nearbylinenums = {a2, max(0, a1 - 1)}
            involved = [annotated[i]
                        for i in nearbylinenums if annotated[i][0] != 1]
        involvedrevs = list(set(r for r, l in involved))
        newfixups = []
        if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
            rev = involvedrevs[0]
            if rev > 1:
                fixuprev = rev + 1
                newfixups.append((fixuprev, a1, a2, b1, b2))
        elif a2 - a1 == b2 - b1 or b1 == b2:
            # 1:1 line mapping, or chunk was deleted
            for i in pycompat.xrange(a1, a2):
                rev, linenum = annotated[i]
                if rev > 1:
                    if b1 == b2: # deletion, simply remove that single line
                        nb1 = nb2 = 0
                    else: # 1:1 line mapping, change the corresponding rev
                        nb1 = b1 + i - a1
                        nb2 = nb1 + 1
                    fixuprev = rev + 1
                    newfixups.append((fixuprev, i, i + 1, nb1, nb2))
        return self._optimizefixups(newfixups)

    @staticmethod
    def _alldiffchunks(a, b, alines, blines):
        """like mdiff.allblocks, but only care about differences"""
        blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
        for chunk, btype in blocks:
            if btype != '!':
                continue
            yield chunk
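
    # Example (illustrative): for a = 'a\nb\nc\n' and b = 'a\nx\nc\n' (with
    # alines/blines from mdiff.splitnewlines), _alldiffchunks yields the
    # single changed block (1, 2, 1, 2): lines a[1:2] are replaced by b[1:2].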

    def _buildlinelog(self):
        """calculate the initial linelog based on self.content{,line}s.
        this is similar to running a partial "annotate".
        """
        llog = linelog.linelog()
        a, alines = '', []
        for i in pycompat.xrange(len(self.contents)):
            b, blines = self.contents[i], self.contentlines[i]
            llrev = i * 2 + 1
            chunks = self._alldiffchunks(a, b, alines, blines)
            for a1, a2, b1, b2 in reversed(list(chunks)):
                llog.replacelines(llrev, a1, a2, b1, b2)
            a, alines = b, blines
        return llog
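
    # Worked example (illustrative): with three fctxs, _buildlinelog stores
    # their contents as linelog revs 1, 3 and 5, leaving the even revs 2, 4
    # and 6 free for fixups. Checking out rev (i + 1) * 2 therefore yields
    # fctxs[i]'s content plus any fixups applied to it (see _checkoutlinelog).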

    def _checkoutlinelog(self):
        """() -> [str]. check out file contents from linelog"""
        contents = []
        for i in pycompat.xrange(len(self.contents)):
            rev = (i + 1) * 2
            self.linelog.annotate(rev)
            content = ''.join(map(self._getline, self.linelog.annotateresult))
            contents.append(content)
        return contents

    def _checkoutlinelogwithedits(self):
        """() -> [str]. prompt all lines for edit"""
        alllines = self.linelog.getalllines()
        # header
        editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
                        'exists in the changeset to the top\nHG:\n')
                      % self.fctxs[-1].path())
        # [(idx, fctx)]. hide the dummy emptyfilecontext
        visiblefctxs = [(i, f)
                        for i, f in enumerate(self.fctxs)
                        if not isinstance(f, emptyfilecontext)]
        for i, (j, f) in enumerate(visiblefctxs):
            editortext += (_('HG: %s/%s %s %s\n') %
                           ('|' * i, '-' * (len(visiblefctxs) - i + 1),
                            node.short(f.node()),
                            f.description().split('\n',1)[0]))
        editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
        # figure out the lifetime of a line, this is relatively inefficient,
        # but probably fine
        lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
        for i, f in visiblefctxs:
            self.linelog.annotate((i + 1) * 2)
            for l in self.linelog.annotateresult:
                lineset[l].add(i)
        # append lines
        for l in alllines:
            editortext += ('    %s : %s' %
                           (''.join([('y' if i in lineset[l] else ' ')
                                     for i, _f in visiblefctxs]),
                            self._getline(l)))
        # run editor
        editedtext = self.ui.edit(editortext, '', action='absorb')
        if not editedtext:
            raise error.Abort(_('empty editor text'))
        # parse edited result
        contents = ['' for i in self.fctxs]
        leftpadpos = 4
        colonpos = leftpadpos + len(visiblefctxs) + 1
        for l in mdiff.splitnewlines(editedtext):
            if l.startswith('HG:'):
                continue
            if l[colonpos - 1:colonpos + 2] != ' : ':
                raise error.Abort(_('malformed line: %s') % l)
            linecontent = l[colonpos + 2:]
            for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
                if ch == 'y':
                    contents[visiblefctxs[i][0]] += linecontent
        # chunkstats is hard to calculate if anything changes, therefore
        # set them to just a simple value (1, 1).
        if editedtext != editortext:
            self.chunkstats = [1, 1]
        return contents

    def _getline(self, lineinfo):
        """((rev, linenum)) -> str. convert rev+line number to line content"""
        rev, linenum = lineinfo
        if rev & 1: # odd: original line taken from fctxs
            return self.contentlines[rev // 2][linenum]
        else: # even: fixup line from targetfctx
            return self.targetlines[linenum]
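
    # Example (illustrative): lineinfo (3, 5) is an odd linelog rev, so the
    # line comes from self.contentlines[3 // 2][5], i.e. fctxs[1]; lineinfo
    # (4, 5) is even, so the line is the fixup text self.targetlines[5].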

    def _iscontinuous(self, a1, a2, closedinterval=False):
        """(a1, a2 : int) -> bool

        check if these lines are continuous. i.e. no other insertions or
        deletions (from other revisions) among these lines.

        closedinterval decides whether a2 should be included or not. i.e. is
        it [a1, a2), or [a1, a2] ?
        """
        if a1 >= a2:
            return True
        llog = self.linelog
        offset1 = llog.getoffset(a1)
        offset2 = llog.getoffset(a2) + int(closedinterval)
        linesinbetween = llog.getalllines(offset1, offset2)
        return len(linesinbetween) == a2 - a1 + int(closedinterval)

    def _optimizefixups(self, fixups):
        """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
        merge adjacent fixups to make them less fragmented.
        """
        result = []
        pcurrentchunk = [[-1, -1, -1, -1, -1]]

        def pushchunk():
            if pcurrentchunk[0][0] != -1:
                result.append(tuple(pcurrentchunk[0]))

        for i, chunk in enumerate(fixups):
            rev, a1, a2, b1, b2 = chunk
            lastrev = pcurrentchunk[0][0]
            lasta2 = pcurrentchunk[0][2]
            lastb2 = pcurrentchunk[0][4]
            if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
                    self._iscontinuous(max(a1 - 1, 0), a1)):
                # merge into currentchunk
                pcurrentchunk[0][2] = a2
                pcurrentchunk[0][4] = b2
            else:
                pushchunk()
                pcurrentchunk[0] = list(chunk)
        pushchunk()
        return result
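
    # Example (illustrative): two per-line fixups targeting the same linelog
    # rev, [(4, 0, 1, 0, 1), (4, 1, 2, 1, 2)], merge into [(4, 0, 2, 0, 2)]
    # provided the adjacent a-lines are continuous in the linelog.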

    def _showchanges(self, fm, alines, blines, chunk, fixups):

        def trim(line):
            if line.endswith('\n'):
                line = line[:-1]
            return line

        # this is not optimized for perf but _showchanges only gets executed
        # with an extra command-line flag.
        a1, a2, b1, b2 = chunk
        aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
        for idx, fa1, fa2, fb1, fb2 in fixups:
            for i in pycompat.xrange(fa1, fa2):
                aidxs[i - a1] = (max(idx, 1) - 1) // 2
            for i in pycompat.xrange(fb1, fb2):
                bidxs[i - b1] = (max(idx, 1) - 1) // 2

        fm.startitem()
        fm.write('hunk', ' %s\n',
                 '@@ -%d,%d +%d,%d @@'
                 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
        fm.data(path=self.path, linetype='hunk')

        def writeline(idx, diffchar, line, linetype, linelabel):
            fm.startitem()
            node = ''
            if idx:
                ctx = self.fctxs[idx]
                fm.context(fctx=ctx)
                node = ctx.hex()
                self.ctxaffected.add(ctx.changectx())
            fm.write('node', '%-7.7s ', node, label='absorb.node')
            fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
                     label=linelabel)
            fm.data(path=self.path, linetype=linetype)

        for i in pycompat.xrange(a1, a2):
            writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
                      'diff.deleted')
        for i in pycompat.xrange(b1, b2):
            writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
                      'diff.inserted')

class fixupstate(object):
    """state needed to run absorb

    internally, it keeps paths and filefixupstates.

    a typical use is like filefixupstates:

    1. call diffwith, to calculate fixups
    2. (optionally), present fixups to the user, or edit fixups
    3. call apply, to apply changes to memory
    4. call commit, to commit changes to hg database
    """

    def __init__(self, stack, ui=None, opts=None):
        """([ctx], ui or None) -> None

        stack: should be linear, and sorted by topo order - oldest first.
        all commits in stack are considered mutable.
        """
        assert stack
        self.ui = ui or nullui()
        self.opts = opts or {}
        self.stack = stack
        self.repo = stack[-1].repo().unfiltered()

        # following fields will be filled later
        self.paths = [] # [str]
        self.status = None # ctx.status output
        self.fctxmap = {} # {path: {ctx: fctx}}
        self.fixupmap = {} # {path: filefixupstate}
        self.replacemap = {} # {oldnode: newnode or None}
        self.finalnode = None # head after all fixups
        self.ctxaffected = set() # ctx that will be absorbed into

    def diffwith(self, targetctx, match=None, fm=None):
        """diff and prepare fixups. update self.fixupmap, self.paths"""
        # only care about modified files
        self.status = self.stack[-1].status(targetctx, match)
        self.paths = []
        # but if --edit-lines is used, the user may want to edit files
        # even if they are not modified
        editopt = self.opts.get('edit_lines')
        if not self.status.modified and editopt and match:
            interestingpaths = match.files()
        else:
            interestingpaths = self.status.modified
        # prepare the filefixupstate
        seenfctxs = set()
        # sorting is necessary to eliminate ambiguity for the "double move"
        # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
        for path in sorted(interestingpaths):
            self.ui.debug('calculating fixups for %s\n' % path)
            targetfctx = targetctx[path]
            fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
            # ignore symbolic links or binary, or unchanged files
            if any(f.islink() or stringutil.binary(f.data())
                   for f in [targetfctx] + fctxs
                   if not isinstance(f, emptyfilecontext)):
                continue
            if targetfctx.data() == fctxs[-1].data() and not editopt:
                continue
            seenfctxs.update(fctxs[1:])
            self.fctxmap[path] = ctx2fctx
            fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
            if fm is not None:
                fm.startitem()
                fm.plain('showing changes for ')
                fm.write('path', '%s\n', path, label='absorb.path')
                fm.data(linetype='path')
            fstate.diffwith(targetfctx, fm)
            self.fixupmap[path] = fstate
            self.paths.append(path)
            self.ctxaffected.update(fstate.ctxaffected)

    def apply(self):
        """apply fixups to individual filefixupstates"""
        for path, state in self.fixupmap.iteritems():
            if self.ui.debugflag:
                self.ui.write(_('applying fixups to %s\n') % path)
            state.apply()

    @property
    def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
        return dict((path, state.chunkstats)
                    for path, state in self.fixupmap.iteritems())

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
        with self.repo.wlock(), self.repo.lock():
            with self.repo.transaction('absorb') as tr:
                self._commitstack()
                self._movebookmarks(tr)
                if self.repo['.'].node() in self.replacemap:
                    self._moveworkingdirectoryparent()
                if self._useobsolete:
                    self._obsoleteoldcommits()
            if not self._useobsolete: # strip must be outside transactions
                self._stripoldcommits()
            return self.finalnode

    def printchunkstats(self):
        """print things like '1 of 2 chunk(s) applied'"""
        ui = self.ui
        chunkstats = self.chunkstats
        if ui.verbose:
            # chunkstats for each file
            for path, stat in chunkstats.iteritems():
                if stat[0]:
                    ui.write(_('%s: %d of %d chunk(s) applied\n')
                             % (path, stat[0], stat[1]))
        elif not ui.quiet:
            # a summary for all files
            stats = chunkstats.values()
            applied, total = (sum(s[i] for s in stats) for i in (0, 1))
            ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))

    def _commitstack(self):
        """make new commits. update self.finalnode, self.replacemap.
        it is split from "commit" to avoid too much indentation.
        """
        # last node (20-char) committed by us
        lastcommitted = None
        # p1 which overrides the parent of the next commit, "None" means use
        # the original parent unchanged
        nextp1 = None
        for ctx in self.stack:
            memworkingcopy = self._getnewfilecontents(ctx)
            if not memworkingcopy and not lastcommitted:
                # nothing changed, nothing committed
                nextp1 = ctx
                continue
            msg = ''
            if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                # changeset is no longer necessary
                self.replacemap[ctx.node()] = None
                msg = _('became empty and was dropped')
            else:
                # changeset needs re-commit
                nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
                lastcommitted = self.repo[nodestr]
                nextp1 = lastcommitted
                self.replacemap[ctx.node()] = lastcommitted.node()
                if memworkingcopy:
                    msg = _('%d file(s) changed, became %s') % (
                        len(memworkingcopy), self._ctx2str(lastcommitted))
                else:
                    msg = _('became %s') % self._ctx2str(lastcommitted)
            if self.ui.verbose and msg:
                self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
        self.finalnode = lastcommitted and lastcommitted.node()

    def _ctx2str(self, ctx):
        if self.ui.debugflag:
            return '%d:%s' % (ctx.rev(), ctx.hex())
        else:
            return '%d:%s' % (ctx.rev(), node.short(ctx.node()))

    def _getnewfilecontents(self, ctx):
        """(ctx) -> {path: str}

        fetch file contents from filefixupstates.
        return the working copy overrides - files different from ctx.
        """
        result = {}
        for path in self.paths:
            ctx2fctx = self.fctxmap[path] # {ctx: fctx}
            if ctx not in ctx2fctx:
                continue
            fctx = ctx2fctx[ctx]
            content = fctx.data()
            newcontent = self.fixupmap[path].getfinalcontent(fctx)
            if content != newcontent:
                result[fctx.path()] = newcontent
        return result

    def _movebookmarks(self, tr):
        repo = self.repo
        needupdate = [(name, self.replacemap[hsh])
                      for name, hsh in repo._bookmarks.iteritems()
                      if hsh in self.replacemap]
        changes = []
        for name, hsh in needupdate:
            if hsh:
                changes.append((name, hsh))
                if self.ui.verbose:
                    self.ui.write(_('moving bookmark %s to %s\n')
                                  % (name, node.hex(hsh)))
            else:
                changes.append((name, None))
                if self.ui.verbose:
                    self.ui.write(_('deleting bookmark %s\n') % name)
        repo._bookmarks.applychanges(repo, tr, changes)

    def _moveworkingdirectoryparent(self):
        if not self.finalnode:
            # Find the latest not-{obsoleted,stripped} parent.
            revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
            ctx = self.repo[revs.first()]
            self.finalnode = ctx.node()
        else:
            ctx = self.repo[self.finalnode]

        dirstate = self.repo.dirstate
        # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
        # be slow. in absorb's case, no need to invalidate fsmonitorstate.
        noop = lambda: 0
        restore = noop
        if util.safehasattr(dirstate, '_fsmonitorstate'):
            bak = dirstate._fsmonitorstate.invalidate
            def restore():
                dirstate._fsmonitorstate.invalidate = bak
            dirstate._fsmonitorstate.invalidate = noop
        try:
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
        finally:
            restore()

    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy)):
            return False
        for path, content in memworkingcopy.iteritems():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True
840
840
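    # Editorial note, not part of the original module: _willbecomenoop()
    # answers "if ctx is rewritten with these file contents, does it still
    # differ from its parent?". A made-up example of the two interesting
    # outcomes (file names and contents are hypothetical):
    #
    #     ctx touches only 'a.txt'; memworkingcopy = {'a.txt': 'x\n'} and the
    #     parent's copy of 'a.txt' is already 'x\n'      -> True (drop commit)
    #     ctx also touches 'b.txt', which is not overridden by memworkingcopy
    #                                                     -> False (keep it)
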
    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """({path: content}, ctx, node) -> node. make a single commit

        the commit is a clone of ctx, with an (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        # preserve phase
        with mctx.repo().ui.configoverride({
                ('phases', 'new-commit'): ctx.phase()}):
            return mctx.commit()

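    # Editorial note, not part of the original module: the expression
    # "parents = p1 and (p1, node.nullid)" relies on short-circuiting "and",
    # so parents stays None when no p1 override is given and becomes a
    # (p1, null) pair when one is, e.g. (values are illustrative):
    #
    #     p1 = None        ->  parents = None
    #     p1 = b'\x12...'  ->  parents = (p1, node.nullid)
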
    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _obsoleteoldcommits(self):
        relations = [(self.repo[k], v and (self.repo[v],) or ())
                     for k, v in self.replacemap.iteritems()]
        if relations:
            obsolete.createmarkers(self.repo, relations)

    def _stripoldcommits(self):
        nodelist = self.replacemap.keys()
        # make sure we don't strip innocent children
        revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
                              nodelist, nodelist)
        tonode = self.repo.changelog.node
        nodelist = [tonode(r) for r in revs]
        if nodelist:
            repair.strip(self.repo.ui, self.repo, nodelist)

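# Editorial note, not part of the original module: the revset above keeps a
# replaced changeset out of the strip list whenever it has a surviving
# ("innocent") descendant that was not itself replaced, since stripping it
# would take that descendant with it. A standalone sketch of the same set
# algebra on a tiny hypothetical history (node names and shape are made up):

def _example_stripfilter():
    # linear history 1 <- 2 <- 3; absorb replaced 1 and 2, but 3 is untouched
    parentsof = {1: [], 2: [1], 3: [2]}
    replaced = {1, 2}

    def descendants(nodes):
        out = set(nodes)
        for n in sorted(parentsof):
            if any(p in out for p in parentsof[n]):
                out.add(n)
        return out

    def ancestors(nodes):
        out, stack = set(nodes), list(nodes)
        while stack:
            for p in parentsof[stack.pop()]:
                if p not in out:
                    out.add(p)
                    stack.append(p)
        return out

    def heads(nodes):
        return {n for n in nodes
                if not any(n in parentsof[c] for c in nodes)}

    # '%ln - (::(heads(%ln::)-%ln))' spelled out with plain sets:
    innocent_heads = heads(descendants(replaced)) - replaced   # {3}
    protected = ancestors(innocent_heads)                      # {1, 2, 3}
    return replaced - protected   # set(): stripping 1 or 2 would remove 3
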
def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)

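# Editorial note, not part of the original module: _parsechunk() reduces a
# hunk to "replace the 0-based slice [a1:a2] of the old file with blines".
# A hypothetical parsed result for a hunk that replaces old lines 3-4 of a
# made-up file with two new lines:

def _example_parsedchunk():
    path = 'foo.py'                              # hypothetical file name
    a1, a2 = 2, 4                                # old lines 3-4 as a 0-based slice
    blines = ['new line 1\n', 'new line 2\n']    # replacement lines
    return path, (a1, a2, blines)
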
def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)

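# Editorial note, not part of the original module: overlaydiffcontext() sorts
# each file's patches in reverse order before splicing them in, so applying a
# later slice never shifts the offsets of an earlier one. A small standalone
# sketch of that idea on a plain list of lines (contents are made up):

def _example_apply_patches_in_reverse():
    lines = ['a\n', 'b\n', 'c\n', 'd\n', 'e\n']
    patches = [(0, 1, ['A\n']), (3, 5, ['D\n'])]   # (a1, a2, blines)
    for a1, a2, blines in sorted(patches, reverse=True):
        lines[a1:a2] = blines      # highest offsets first, earlier slices stay valid
    return ''.join(lines)          # 'A\nb\nc\nD\n'
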
def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'max-stack-size')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no mutable changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    fm = None
    if opts.get('print_changes') or not opts.get('apply_changes'):
        fm = ui.formatter('absorb', opts)
    state.diffwith(targetctx, matcher, fm)
    if fm is not None:
        fm.startitem()
        fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
        fm.data(linetype='summary')
        for ctx in reversed(stack):
            if ctx not in state.ctxaffected:
                continue
            fm.startitem()
            fm.context(ctx=ctx)
            fm.data(linetype='changeset')
            fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
            descfirstline = ctx.description().splitlines()[0]
            fm.write('descfirstline', '%s\n', descfirstline,
                     label='absorb.description')
        fm.end()
    if not opts.get('dry_run'):
        if not opts.get('apply_changes'):
            if ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1):
                raise error.Abort(_('absorb cancelled\n'))

        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state

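# Editorial note, not part of the original module: absorb() is also the
# programmatic entry point used by absorbcmd() below, and the opts keys it
# reads ('interactive', 'print_changes', 'apply_changes', 'dry_run') mirror
# the flags registered on the command. A rough sketch of a prompt-free call;
# the ui and repo objects are assumed to come from a loaded Mercurial
# environment, and 'foo.py' is a made-up pattern:

def _example_programmatic_absorb(ui, repo):
    opts = {'apply_changes': True, 'dry_run': False,
            'interactive': False, 'print_changes': False}
    state = absorb(ui, repo, pats=('foo.py',), opts=opts)
    state.printchunkstats()
    return state
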
@command('^absorb',
         [('a', 'apply-changes', None,
           _('apply changes without prompting for confirmation')),
          ('p', 'print-changes', None,
           _('always print which changesets are modified by which changes')),
          ('i', 'interactive', None,
           _('interactively select which chunks to apply (EXPERIMENTAL)')),
          ('e', 'edit-lines', None,
           _('edit what lines belong to which changesets before commit '
             '(EXPERIMENTAL)')),
         ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
         _('hg absorb [OPTION] [FILE]...'))
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. Such
    changes can be observed by :hg:`status` or :hg:`diff` afterwards. In
    other words, absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    By default, absorb will show what it plans to do and prompt for
    confirmation. If you are confident that the changes will be absorbed
    to the correct place, run :hg:`absorb -a` to apply the changes
    immediately.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    opts = pycompat.byteskwargs(opts)
    state = absorb(ui, repo, pats=pats, opts=opts)
    if sum(s[0] for s in state.chunkstats.values()) == 0:
        return 1
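# Editorial note, not part of the original module: the exit-status check above
# sums the first field of each state.chunkstats entry, which (going by the
# "all chunks were ignored" wording in the docstring and by printchunkstats())
# appears to count the chunks actually absorbed per file; the precise record
# layout is defined by fixupstate earlier in this module and is assumed here.
# Illustrative values only:
#
#     state.chunkstats == {'foo.py': [0, 3], 'bar.py': [0, 1]}
#     sum(s[0] for s in state.chunkstats.values()) == 0   # -> exit status 1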