absorb: use pycompat to get xrange...
Augie Fackler
r38956:a5c8c547 default
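
This commit replaces direct uses of the Python 2 builtin xrange with pycompat.xrange so absorb keeps working on Python 3, where xrange no longer exists; it also adds pycompat to the extension's import list. The snippet below is a hypothetical minimal sketch of what such a compatibility alias looks like, not Mercurial's actual pycompat source:

    # Hypothetical sketch of an xrange compatibility alias, for illustration only.
    import sys

    if sys.version_info[0] >= 3:
        xrange = range   # Python 3: range is already a lazy sequence
    else:
        xrange = xrange  # Python 2: re-export the builtin under the same name

Call sites then spell their loops as pycompat.xrange(start, stop), which is exactly the substitution made in the hunks below.
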
@@ -1,1043 +1,1044 @@
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 maxstacksize = 50
17 maxstacksize = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 addnoise = 1
19 addnoise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amendflag = correlated
21 amendflag = correlated
22
22
23 [color]
23 [color]
24 absorb.node = blue bold
24 absorb.node = blue bold
25 absorb.path = bold
25 absorb.path = bold
26 """
26 """
27
27
28 from __future__ import absolute_import
28 from __future__ import absolute_import
29
29
30 import collections
30 import collections
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import (
33 from mercurial import (
34 cmdutil,
34 cmdutil,
35 commands,
35 commands,
36 context,
36 context,
37 crecord,
37 crecord,
38 error,
38 error,
39 extensions,
39 extensions,
40 linelog,
40 linelog,
41 mdiff,
41 mdiff,
42 node,
42 node,
43 obsolete,
43 obsolete,
44 patch,
44 patch,
45 phases,
45 phases,
46 pycompat,
46 registrar,
47 registrar,
47 repair,
48 repair,
48 scmutil,
49 scmutil,
49 util,
50 util,
50 )
51 )
51 from mercurial.utils import (
52 from mercurial.utils import (
52 stringutil,
53 stringutil,
53 )
54 )
54
55
55 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
56 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
56 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
57 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
57 # be specifying the version(s) of Mercurial they are tested with, or
58 # be specifying the version(s) of Mercurial they are tested with, or
58 # leave the attribute unspecified.
59 # leave the attribute unspecified.
59 testedwith = 'ships-with-hg-core'
60 testedwith = 'ships-with-hg-core'
60
61
61 cmdtable = {}
62 cmdtable = {}
62 command = registrar.command(cmdtable)
63 command = registrar.command(cmdtable)
63
64
64 configtable = {}
65 configtable = {}
65 configitem = registrar.configitem(configtable)
66 configitem = registrar.configitem(configtable)
66
67
67 configitem('absorb', 'addnoise', default=True)
68 configitem('absorb', 'addnoise', default=True)
68 configitem('absorb', 'amendflag', default=None)
69 configitem('absorb', 'amendflag', default=None)
69 configitem('absorb', 'maxstacksize', default=50)
70 configitem('absorb', 'maxstacksize', default=50)
70
71
71 colortable = {
72 colortable = {
72 'absorb.node': 'blue bold',
73 'absorb.node': 'blue bold',
73 'absorb.path': 'bold',
74 'absorb.path': 'bold',
74 }
75 }
75
76
76 defaultdict = collections.defaultdict
77 defaultdict = collections.defaultdict
77
78
78 class nullui(object):
79 class nullui(object):
79 """blank ui object doing nothing"""
80 """blank ui object doing nothing"""
80 debugflag = False
81 debugflag = False
81 verbose = False
82 verbose = False
82 quiet = True
83 quiet = True
83
84
84 def __getitem__(name):
85 def __getitem__(name):
85 def nullfunc(*args, **kwds):
86 def nullfunc(*args, **kwds):
86 return
87 return
87 return nullfunc
88 return nullfunc
88
89
89 class emptyfilecontext(object):
90 class emptyfilecontext(object):
90 """minimal filecontext representing an empty file"""
91 """minimal filecontext representing an empty file"""
91 def data(self):
92 def data(self):
92 return ''
93 return ''
93
94
94 def node(self):
95 def node(self):
95 return node.nullid
96 return node.nullid
96
97
97 def uniq(lst):
98 def uniq(lst):
98 """list -> list. remove duplicated items without changing the order"""
99 """list -> list. remove duplicated items without changing the order"""
99 seen = set()
100 seen = set()
100 result = []
101 result = []
101 for x in lst:
102 for x in lst:
102 if x not in seen:
103 if x not in seen:
103 seen.add(x)
104 seen.add(x)
104 result.append(x)
105 result.append(x)
105 return result
106 return result
106
107
107 def getdraftstack(headctx, limit=None):
108 def getdraftstack(headctx, limit=None):
108 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
109 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
109
110
110 changesets are sorted in topo order, oldest first.
111 changesets are sorted in topo order, oldest first.
111 return at most limit items, if limit is a positive number.
112 return at most limit items, if limit is a positive number.
112
113
113 merges are considered as non-draft as well. i.e. every commit
114 merges are considered as non-draft as well. i.e. every commit
114 returned has and only has 1 parent.
115 returned has and only has 1 parent.
115 """
116 """
116 ctx = headctx
117 ctx = headctx
117 result = []
118 result = []
118 while ctx.phase() != phases.public:
119 while ctx.phase() != phases.public:
119 if limit and len(result) >= limit:
120 if limit and len(result) >= limit:
120 break
121 break
121 parents = ctx.parents()
122 parents = ctx.parents()
122 if len(parents) != 1:
123 if len(parents) != 1:
123 break
124 break
124 result.append(ctx)
125 result.append(ctx)
125 ctx = parents[0]
126 ctx = parents[0]
126 result.reverse()
127 result.reverse()
127 return result
128 return result
128
129
129 def getfilestack(stack, path, seenfctxs=None):
130 def getfilestack(stack, path, seenfctxs=None):
130 """([ctx], str, set) -> [fctx], {ctx: fctx}
131 """([ctx], str, set) -> [fctx], {ctx: fctx}
131
132
132 stack is a list of contexts, from old to new. usually they are what
133 stack is a list of contexts, from old to new. usually they are what
133 "getdraftstack" returns.
134 "getdraftstack" returns.
134
135
135 follows renames, but not copies.
136 follows renames, but not copies.
136
137
137 seenfctxs is a set of filecontexts that will be considered "immutable".
138 seenfctxs is a set of filecontexts that will be considered "immutable".
138 they are usually what this function returned in earlier calls, useful
139 they are usually what this function returned in earlier calls, useful
139 to avoid issues that a file was "moved" to multiple places and was then
140 to avoid issues that a file was "moved" to multiple places and was then
140 modified differently, like: "a" was copied to "b", "a" was also copied to
141 modified differently, like: "a" was copied to "b", "a" was also copied to
141 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
142 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
142 and we enforce only one of them to be able to affect "a"'s content.
143 and we enforce only one of them to be able to affect "a"'s content.
143
144
144 return an empty list and an empty dict, if the specified path does not
145 return an empty list and an empty dict, if the specified path does not
145 exist in stack[-1] (the top of the stack).
146 exist in stack[-1] (the top of the stack).
146
147
147 otherwise, return a list of de-duplicated filecontexts, and the map to
148 otherwise, return a list of de-duplicated filecontexts, and the map to
148 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
149 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
149 of the list would be outside the stack and should be considered immutable.
150 of the list would be outside the stack and should be considered immutable.
150 the remaining items are within the stack.
151 the remaining items are within the stack.
151
152
152 for example, given the following changelog and corresponding filelog
153 for example, given the following changelog and corresponding filelog
153 revisions:
154 revisions:
154
155
155 changelog: 3----4----5----6----7
156 changelog: 3----4----5----6----7
156 filelog: x 0----1----1----2 (x: no such file yet)
157 filelog: x 0----1----1----2 (x: no such file yet)
157
158
158 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
159 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
159 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
160 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
160 dummy empty filecontext.
161 dummy empty filecontext.
161 - if stack = [2], returns ([], {})
162 - if stack = [2], returns ([], {})
162 - if stack = [7], returns ([1, 2], {7: 2})
163 - if stack = [7], returns ([1, 2], {7: 2})
163 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
164 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
164 removed, since 1 is immutable.
165 removed, since 1 is immutable.
165 """
166 """
166 if seenfctxs is None:
167 if seenfctxs is None:
167 seenfctxs = set()
168 seenfctxs = set()
168 assert stack
169 assert stack
169
170
170 if path not in stack[-1]:
171 if path not in stack[-1]:
171 return [], {}
172 return [], {}
172
173
173 fctxs = []
174 fctxs = []
174 fctxmap = {}
175 fctxmap = {}
175
176
176 pctx = stack[0].p1() # the public (immutable) ctx we stop at
177 pctx = stack[0].p1() # the public (immutable) ctx we stop at
177 for ctx in reversed(stack):
178 for ctx in reversed(stack):
178 if path not in ctx: # the file is added in the next commit
179 if path not in ctx: # the file is added in the next commit
179 pctx = ctx
180 pctx = ctx
180 break
181 break
181 fctx = ctx[path]
182 fctx = ctx[path]
182 fctxs.append(fctx)
183 fctxs.append(fctx)
183 if fctx in seenfctxs: # treat fctx as the immutable one
184 if fctx in seenfctxs: # treat fctx as the immutable one
184 pctx = None # do not add another immutable fctx
185 pctx = None # do not add another immutable fctx
185 break
186 break
186 fctxmap[ctx] = fctx # only for mutable fctxs
187 fctxmap[ctx] = fctx # only for mutable fctxs
187 renamed = fctx.renamed()
188 renamed = fctx.renamed()
188 if renamed:
189 if renamed:
189 path = renamed[0] # follow rename
190 path = renamed[0] # follow rename
190 if path in ctx: # but do not follow copy
191 if path in ctx: # but do not follow copy
191 pctx = ctx.p1()
192 pctx = ctx.p1()
192 break
193 break
193
194
194 if pctx is not None: # need an extra immutable fctx
195 if pctx is not None: # need an extra immutable fctx
195 if path in pctx:
196 if path in pctx:
196 fctxs.append(pctx[path])
197 fctxs.append(pctx[path])
197 else:
198 else:
198 fctxs.append(emptyfilecontext())
199 fctxs.append(emptyfilecontext())
199
200
200 fctxs.reverse()
201 fctxs.reverse()
201 # note: we rely on a property of hg: filerev is not reused for linear
202 # note: we rely on a property of hg: filerev is not reused for linear
202 # history. i.e. it's impossible to have:
203 # history. i.e. it's impossible to have:
203 # changelog: 4----5----6 (linear, no merges)
204 # changelog: 4----5----6 (linear, no merges)
204 # filelog: 1----2----1
205 # filelog: 1----2----1
205 # ^ reuse filerev (impossible)
206 # ^ reuse filerev (impossible)
206 # because parents are part of the hash. if that's not true, we need to
207 # because parents are part of the hash. if that's not true, we need to
207 # remove uniq and find a different way to identify fctxs.
208 # remove uniq and find a different way to identify fctxs.
208 return uniq(fctxs), fctxmap
209 return uniq(fctxs), fctxmap
209
210
210 class overlaystore(patch.filestore):
211 class overlaystore(patch.filestore):
211 """read-only, hybrid store based on a dict and ctx.
212 """read-only, hybrid store based on a dict and ctx.
212 memworkingcopy: {path: content}, overrides file contents.
213 memworkingcopy: {path: content}, overrides file contents.
213 """
214 """
214 def __init__(self, basectx, memworkingcopy):
215 def __init__(self, basectx, memworkingcopy):
215 self.basectx = basectx
216 self.basectx = basectx
216 self.memworkingcopy = memworkingcopy
217 self.memworkingcopy = memworkingcopy
217
218
218 def getfile(self, path):
219 def getfile(self, path):
219 """comply with mercurial.patch.filestore.getfile"""
220 """comply with mercurial.patch.filestore.getfile"""
220 if path not in self.basectx:
221 if path not in self.basectx:
221 return None, None, None
222 return None, None, None
222 fctx = self.basectx[path]
223 fctx = self.basectx[path]
223 if path in self.memworkingcopy:
224 if path in self.memworkingcopy:
224 content = self.memworkingcopy[path]
225 content = self.memworkingcopy[path]
225 else:
226 else:
226 content = fctx.data()
227 content = fctx.data()
227 mode = (fctx.islink(), fctx.isexec())
228 mode = (fctx.islink(), fctx.isexec())
228 renamed = fctx.renamed() # False or (path, node)
229 renamed = fctx.renamed() # False or (path, node)
229 return content, mode, (renamed and renamed[0])
230 return content, mode, (renamed and renamed[0])
230
231
231 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
232 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
232 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
233 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
233 memworkingcopy overrides file contents.
234 memworkingcopy overrides file contents.
234 """
235 """
235 # parents must contain 2 items: (node1, node2)
236 # parents must contain 2 items: (node1, node2)
236 if parents is None:
237 if parents is None:
237 parents = ctx.repo().changelog.parents(ctx.node())
238 parents = ctx.repo().changelog.parents(ctx.node())
238 if extra is None:
239 if extra is None:
239 extra = ctx.extra()
240 extra = ctx.extra()
240 date = ctx.date()
241 date = ctx.date()
241 desc = ctx.description()
242 desc = ctx.description()
242 user = ctx.user()
243 user = ctx.user()
243 files = set(ctx.files()).union(memworkingcopy.iterkeys())
244 files = set(ctx.files()).union(memworkingcopy.iterkeys())
244 store = overlaystore(ctx, memworkingcopy)
245 store = overlaystore(ctx, memworkingcopy)
245 return context.memctx(
246 return context.memctx(
246 repo=ctx.repo(), parents=parents, text=desc,
247 repo=ctx.repo(), parents=parents, text=desc,
247 files=files, filectxfn=store, user=user, date=date,
248 files=files, filectxfn=store, user=user, date=date,
248 branch=None, extra=extra)
249 branch=None, extra=extra)
249
250
250 class filefixupstate(object):
251 class filefixupstate(object):
251 """state needed to apply fixups to a single file
252 """state needed to apply fixups to a single file
252
253
253 internally, it keeps file contents of several revisions and a linelog.
254 internally, it keeps file contents of several revisions and a linelog.
254
255
255 the linelog uses odd revision numbers for original contents (fctxs passed
256 the linelog uses odd revision numbers for original contents (fctxs passed
256 to __init__), and even revision numbers for fixups, like:
257 to __init__), and even revision numbers for fixups, like:
257
258
258 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
259 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
259 linelog rev 2: fixups made to self.fctxs[0]
260 linelog rev 2: fixups made to self.fctxs[0]
260 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
261 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
261 linelog rev 4: fixups made to self.fctxs[1]
262 linelog rev 4: fixups made to self.fctxs[1]
262 ...
263 ...
263
264
264 a typical use is like:
265 a typical use is like:
265
266
266 1. call diffwith, to calculate self.fixups
267 1. call diffwith, to calculate self.fixups
267 2. (optionally), present self.fixups to the user, or change it
268 2. (optionally), present self.fixups to the user, or change it
268 3. call apply, to apply changes
269 3. call apply, to apply changes
269 4. read results from "finalcontents", or call getfinalcontent
270 4. read results from "finalcontents", or call getfinalcontent
270 """
271 """
271
272
272 def __init__(self, fctxs, ui=None, opts=None):
273 def __init__(self, fctxs, ui=None, opts=None):
273 """([fctx], ui or None) -> None
274 """([fctx], ui or None) -> None
274
275
275 fctxs should be linear, and sorted by topo order - oldest first.
276 fctxs should be linear, and sorted by topo order - oldest first.
276 fctxs[0] will be considered as "immutable" and will not be changed.
277 fctxs[0] will be considered as "immutable" and will not be changed.
277 """
278 """
278 self.fctxs = fctxs
279 self.fctxs = fctxs
279 self.ui = ui or nullui()
280 self.ui = ui or nullui()
280 self.opts = opts or {}
281 self.opts = opts or {}
281
282
282 # following fields are built from fctxs. they exist for perf reason
283 # following fields are built from fctxs. they exist for perf reason
283 self.contents = [f.data() for f in fctxs]
284 self.contents = [f.data() for f in fctxs]
284 self.contentlines = map(mdiff.splitnewlines, self.contents)
285 self.contentlines = map(mdiff.splitnewlines, self.contents)
285 self.linelog = self._buildlinelog()
286 self.linelog = self._buildlinelog()
286 if self.ui.debugflag:
287 if self.ui.debugflag:
287 assert self._checkoutlinelog() == self.contents
288 assert self._checkoutlinelog() == self.contents
288
289
289 # following fields will be filled later
290 # following fields will be filled later
290 self.chunkstats = [0, 0] # [adopted, total : int]
291 self.chunkstats = [0, 0] # [adopted, total : int]
291 self.targetlines = [] # [str]
292 self.targetlines = [] # [str]
292 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
293 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
293 self.finalcontents = [] # [str]
294 self.finalcontents = [] # [str]
294
295
295 def diffwith(self, targetfctx, showchanges=False):
296 def diffwith(self, targetfctx, showchanges=False):
296 """calculate fixups needed by examining the differences between
297 """calculate fixups needed by examining the differences between
297 self.fctxs[-1] and targetfctx, chunk by chunk.
298 self.fctxs[-1] and targetfctx, chunk by chunk.
298
299
299 targetfctx is the target state we move towards. we may or may not be
300 targetfctx is the target state we move towards. we may or may not be
300 able to get there because not all modified chunks can be amended into
301 able to get there because not all modified chunks can be amended into
301 a non-public fctx unambiguously.
302 a non-public fctx unambiguously.
302
303
303 call this only once, before apply().
304 call this only once, before apply().
304
305
305 update self.fixups, self.chunkstats, and self.targetlines.
306 update self.fixups, self.chunkstats, and self.targetlines.
306 """
307 """
307 a = self.contents[-1]
308 a = self.contents[-1]
308 alines = self.contentlines[-1]
309 alines = self.contentlines[-1]
309 b = targetfctx.data()
310 b = targetfctx.data()
310 blines = mdiff.splitnewlines(b)
311 blines = mdiff.splitnewlines(b)
311 self.targetlines = blines
312 self.targetlines = blines
312
313
313 self.linelog.annotate(self.linelog.maxrev)
314 self.linelog.annotate(self.linelog.maxrev)
314 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
315 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
315 assert len(annotated) == len(alines)
316 assert len(annotated) == len(alines)
316 # add a dummy end line to make insertion at the end easier
317 # add a dummy end line to make insertion at the end easier
317 if annotated:
318 if annotated:
318 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
319 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
319 annotated.append(dummyendline)
320 annotated.append(dummyendline)
320
321
321 # analyse diff blocks
322 # analyse diff blocks
322 for chunk in self._alldiffchunks(a, b, alines, blines):
323 for chunk in self._alldiffchunks(a, b, alines, blines):
323 newfixups = self._analysediffchunk(chunk, annotated)
324 newfixups = self._analysediffchunk(chunk, annotated)
324 self.chunkstats[0] += bool(newfixups) # 1 or 0
325 self.chunkstats[0] += bool(newfixups) # 1 or 0
325 self.chunkstats[1] += 1
326 self.chunkstats[1] += 1
326 self.fixups += newfixups
327 self.fixups += newfixups
327 if showchanges:
328 if showchanges:
328 self._showchanges(alines, blines, chunk, newfixups)
329 self._showchanges(alines, blines, chunk, newfixups)
329
330
330 def apply(self):
331 def apply(self):
331 """apply self.fixups. update self.linelog, self.finalcontents.
332 """apply self.fixups. update self.linelog, self.finalcontents.
332
333
333 call this only once, before getfinalcontent(), after diffwith().
334 call this only once, before getfinalcontent(), after diffwith().
334 """
335 """
335 # the following is unnecessary, as it's done by "diffwith":
336 # the following is unnecessary, as it's done by "diffwith":
336 # self.linelog.annotate(self.linelog.maxrev)
337 # self.linelog.annotate(self.linelog.maxrev)
337 for rev, a1, a2, b1, b2 in reversed(self.fixups):
338 for rev, a1, a2, b1, b2 in reversed(self.fixups):
338 blines = self.targetlines[b1:b2]
339 blines = self.targetlines[b1:b2]
339 if self.ui.debugflag:
340 if self.ui.debugflag:
340 idx = (max(rev - 1, 0)) // 2
341 idx = (max(rev - 1, 0)) // 2
341 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
342 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
342 % (node.short(self.fctxs[idx].node()),
343 % (node.short(self.fctxs[idx].node()),
343 a1, a2, len(blines)))
344 a1, a2, len(blines)))
344 self.linelog.replacelines(rev, a1, a2, b1, b2)
345 self.linelog.replacelines(rev, a1, a2, b1, b2)
345 if self.opts.get('edit_lines', False):
346 if self.opts.get('edit_lines', False):
346 self.finalcontents = self._checkoutlinelogwithedits()
347 self.finalcontents = self._checkoutlinelogwithedits()
347 else:
348 else:
348 self.finalcontents = self._checkoutlinelog()
349 self.finalcontents = self._checkoutlinelog()
349
350
350 def getfinalcontent(self, fctx):
351 def getfinalcontent(self, fctx):
351 """(fctx) -> str. get modified file content for a given filecontext"""
352 """(fctx) -> str. get modified file content for a given filecontext"""
352 idx = self.fctxs.index(fctx)
353 idx = self.fctxs.index(fctx)
353 return self.finalcontents[idx]
354 return self.finalcontents[idx]
354
355
355 def _analysediffchunk(self, chunk, annotated):
356 def _analysediffchunk(self, chunk, annotated):
356 """analyse a different chunk and return new fixups found
357 """analyse a different chunk and return new fixups found
357
358
358 return [] if no lines from the chunk can be safely applied.
359 return [] if no lines from the chunk can be safely applied.
359
360
360 the chunk (or lines) cannot be safely applied, if, for example:
361 the chunk (or lines) cannot be safely applied, if, for example:
361 - the modified (deleted) lines belong to a public changeset
362 - the modified (deleted) lines belong to a public changeset
362 (self.fctxs[0])
363 (self.fctxs[0])
363 - the chunk is a pure insertion and the adjacent lines (at most 2
364 - the chunk is a pure insertion and the adjacent lines (at most 2
364 lines) belong to different non-public changesets, or do not belong
365 lines) belong to different non-public changesets, or do not belong
365 to any non-public changesets.
366 to any non-public changesets.
366 - the chunk is modifying lines from different changesets.
367 - the chunk is modifying lines from different changesets.
367 in this case, if the number of lines deleted equals to the number
368 in this case, if the number of lines deleted equals to the number
368 of lines added, assume it's a simple 1:1 map (could be wrong).
369 of lines added, assume it's a simple 1:1 map (could be wrong).
369 otherwise, give up.
370 otherwise, give up.
370 - the chunk is modifying lines from a single non-public changeset,
371 - the chunk is modifying lines from a single non-public changeset,
371 but other revisions touch the area as well. i.e. the lines are
372 but other revisions touch the area as well. i.e. the lines are
372 not continuous as seen from the linelog.
373 not continuous as seen from the linelog.
373 """
374 """
374 a1, a2, b1, b2 = chunk
375 a1, a2, b1, b2 = chunk
375 # find involved indexes from annotate result
376 # find involved indexes from annotate result
376 involved = annotated[a1:a2]
377 involved = annotated[a1:a2]
377 if not involved and annotated: # a1 == a2 and a is not empty
378 if not involved and annotated: # a1 == a2 and a is not empty
378 # pure insertion, check nearby lines. ignore lines belong
379 # pure insertion, check nearby lines. ignore lines belong
379 # to the public (first) changeset (i.e. annotated[i][0] == 1)
380 # to the public (first) changeset (i.e. annotated[i][0] == 1)
380 nearbylinenums = {a2, max(0, a1 - 1)}
381 nearbylinenums = {a2, max(0, a1 - 1)}
381 involved = [annotated[i]
382 involved = [annotated[i]
382 for i in nearbylinenums if annotated[i][0] != 1]
383 for i in nearbylinenums if annotated[i][0] != 1]
383 involvedrevs = list(set(r for r, l in involved))
384 involvedrevs = list(set(r for r, l in involved))
384 newfixups = []
385 newfixups = []
385 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
386 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
386 # chunk belongs to a single revision
387 # chunk belongs to a single revision
387 rev = involvedrevs[0]
388 rev = involvedrevs[0]
388 if rev > 1:
389 if rev > 1:
389 fixuprev = rev + 1
390 fixuprev = rev + 1
390 newfixups.append((fixuprev, a1, a2, b1, b2))
391 newfixups.append((fixuprev, a1, a2, b1, b2))
391 elif a2 - a1 == b2 - b1 or b1 == b2:
392 elif a2 - a1 == b2 - b1 or b1 == b2:
392 # 1:1 line mapping, or chunk was deleted
393 # 1:1 line mapping, or chunk was deleted
393 for i in xrange(a1, a2):
394 for i in pycompat.xrange(a1, a2):
394 rev, linenum = annotated[i]
395 rev, linenum = annotated[i]
395 if rev > 1:
396 if rev > 1:
396 if b1 == b2: # deletion, simply remove that single line
397 if b1 == b2: # deletion, simply remove that single line
397 nb1 = nb2 = 0
398 nb1 = nb2 = 0
398 else: # 1:1 line mapping, change the corresponding rev
399 else: # 1:1 line mapping, change the corresponding rev
399 nb1 = b1 + i - a1
400 nb1 = b1 + i - a1
400 nb2 = nb1 + 1
401 nb2 = nb1 + 1
401 fixuprev = rev + 1
402 fixuprev = rev + 1
402 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
403 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
403 return self._optimizefixups(newfixups)
404 return self._optimizefixups(newfixups)
404
405
405 @staticmethod
406 @staticmethod
406 def _alldiffchunks(a, b, alines, blines):
407 def _alldiffchunks(a, b, alines, blines):
407 """like mdiff.allblocks, but only care about differences"""
408 """like mdiff.allblocks, but only care about differences"""
408 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
409 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
409 for chunk, btype in blocks:
410 for chunk, btype in blocks:
410 if btype != '!':
411 if btype != '!':
411 continue
412 continue
412 yield chunk
413 yield chunk
413
414
414 def _buildlinelog(self):
415 def _buildlinelog(self):
415 """calculate the initial linelog based on self.content{,line}s.
416 """calculate the initial linelog based on self.content{,line}s.
416 this is similar to running a partial "annotate".
417 this is similar to running a partial "annotate".
417 """
418 """
418 llog = linelog.linelog()
419 llog = linelog.linelog()
419 a, alines = '', []
420 a, alines = '', []
420 for i in xrange(len(self.contents)):
421 for i in pycompat.xrange(len(self.contents)):
421 b, blines = self.contents[i], self.contentlines[i]
422 b, blines = self.contents[i], self.contentlines[i]
422 llrev = i * 2 + 1
423 llrev = i * 2 + 1
423 chunks = self._alldiffchunks(a, b, alines, blines)
424 chunks = self._alldiffchunks(a, b, alines, blines)
424 for a1, a2, b1, b2 in reversed(list(chunks)):
425 for a1, a2, b1, b2 in reversed(list(chunks)):
425 llog.replacelines(llrev, a1, a2, b1, b2)
426 llog.replacelines(llrev, a1, a2, b1, b2)
426 a, alines = b, blines
427 a, alines = b, blines
427 return llog
428 return llog
428
429
429 def _checkoutlinelog(self):
430 def _checkoutlinelog(self):
430 """() -> [str]. check out file contents from linelog"""
431 """() -> [str]. check out file contents from linelog"""
431 contents = []
432 contents = []
432 for i in xrange(len(self.contents)):
433 for i in pycompat.xrange(len(self.contents)):
433 rev = (i + 1) * 2
434 rev = (i + 1) * 2
434 self.linelog.annotate(rev)
435 self.linelog.annotate(rev)
435 content = ''.join(map(self._getline, self.linelog.annotateresult))
436 content = ''.join(map(self._getline, self.linelog.annotateresult))
436 contents.append(content)
437 contents.append(content)
437 return contents
438 return contents
438
439
439 def _checkoutlinelogwithedits(self):
440 def _checkoutlinelogwithedits(self):
440 """() -> [str]. prompt all lines for edit"""
441 """() -> [str]. prompt all lines for edit"""
441 alllines = self.linelog.getalllines()
442 alllines = self.linelog.getalllines()
442 # header
443 # header
443 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
444 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
444 'exists in the changeset to the top\nHG:\n')
445 'exists in the changeset to the top\nHG:\n')
445 % self.fctxs[-1].path())
446 % self.fctxs[-1].path())
446 # [(idx, fctx)]. hide the dummy emptyfilecontext
447 # [(idx, fctx)]. hide the dummy emptyfilecontext
447 visiblefctxs = [(i, f)
448 visiblefctxs = [(i, f)
448 for i, f in enumerate(self.fctxs)
449 for i, f in enumerate(self.fctxs)
449 if not isinstance(f, emptyfilecontext)]
450 if not isinstance(f, emptyfilecontext)]
450 for i, (j, f) in enumerate(visiblefctxs):
451 for i, (j, f) in enumerate(visiblefctxs):
451 editortext += (_('HG: %s/%s %s %s\n') %
452 editortext += (_('HG: %s/%s %s %s\n') %
452 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
453 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
453 node.short(f.node()),
454 node.short(f.node()),
454 f.description().split('\n',1)[0]))
455 f.description().split('\n',1)[0]))
455 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
456 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
456 # figure out the lifetime of a line, this is relatively inefficient,
457 # figure out the lifetime of a line, this is relatively inefficient,
457 # but probably fine
458 # but probably fine
458 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
459 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
459 for i, f in visiblefctxs:
460 for i, f in visiblefctxs:
460 self.linelog.annotate((i + 1) * 2)
461 self.linelog.annotate((i + 1) * 2)
461 for l in self.linelog.annotateresult:
462 for l in self.linelog.annotateresult:
462 lineset[l].add(i)
463 lineset[l].add(i)
463 # append lines
464 # append lines
464 for l in alllines:
465 for l in alllines:
465 editortext += (' %s : %s' %
466 editortext += (' %s : %s' %
466 (''.join([('y' if i in lineset[l] else ' ')
467 (''.join([('y' if i in lineset[l] else ' ')
467 for i, _f in visiblefctxs]),
468 for i, _f in visiblefctxs]),
468 self._getline(l)))
469 self._getline(l)))
469 # run editor
470 # run editor
470 editedtext = self.ui.edit(editortext, '', action='absorb')
471 editedtext = self.ui.edit(editortext, '', action='absorb')
471 if not editedtext:
472 if not editedtext:
472 raise error.Abort(_('empty editor text'))
473 raise error.Abort(_('empty editor text'))
473 # parse edited result
474 # parse edited result
474 contents = ['' for i in self.fctxs]
475 contents = ['' for i in self.fctxs]
475 leftpadpos = 4
476 leftpadpos = 4
476 colonpos = leftpadpos + len(visiblefctxs) + 1
477 colonpos = leftpadpos + len(visiblefctxs) + 1
477 for l in mdiff.splitnewlines(editedtext):
478 for l in mdiff.splitnewlines(editedtext):
478 if l.startswith('HG:'):
479 if l.startswith('HG:'):
479 continue
480 continue
480 if l[colonpos - 1:colonpos + 2] != ' : ':
481 if l[colonpos - 1:colonpos + 2] != ' : ':
481 raise error.Abort(_('malformed line: %s') % l)
482 raise error.Abort(_('malformed line: %s') % l)
482 linecontent = l[colonpos + 2:]
483 linecontent = l[colonpos + 2:]
483 for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
484 for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
484 if ch == 'y':
485 if ch == 'y':
485 contents[visiblefctxs[i][0]] += linecontent
486 contents[visiblefctxs[i][0]] += linecontent
486 # chunkstats is hard to calculate if anything changes, therefore
487 # chunkstats is hard to calculate if anything changes, therefore
487 # set them to just a simple value (1, 1).
488 # set them to just a simple value (1, 1).
488 if editedtext != editortext:
489 if editedtext != editortext:
489 self.chunkstats = [1, 1]
490 self.chunkstats = [1, 1]
490 return contents
491 return contents
491
492
492 def _getline(self, lineinfo):
493 def _getline(self, lineinfo):
493 """((rev, linenum)) -> str. convert rev+line number to line content"""
494 """((rev, linenum)) -> str. convert rev+line number to line content"""
494 rev, linenum = lineinfo
495 rev, linenum = lineinfo
495 if rev & 1: # odd: original line taken from fctxs
496 if rev & 1: # odd: original line taken from fctxs
496 return self.contentlines[rev // 2][linenum]
497 return self.contentlines[rev // 2][linenum]
497 else: # even: fixup line from targetfctx
498 else: # even: fixup line from targetfctx
498 return self.targetlines[linenum]
499 return self.targetlines[linenum]
499
500
500 def _iscontinuous(self, a1, a2, closedinterval=False):
501 def _iscontinuous(self, a1, a2, closedinterval=False):
501 """(a1, a2 : int) -> bool
502 """(a1, a2 : int) -> bool
502
503
503 check if these lines are continuous. i.e. no other insertions or
504 check if these lines are continuous. i.e. no other insertions or
504 deletions (from other revisions) among these lines.
505 deletions (from other revisions) among these lines.
505
506
506 closedinterval decides whether a2 should be included or not. i.e. is
507 closedinterval decides whether a2 should be included or not. i.e. is
507 it [a1, a2), or [a1, a2] ?
508 it [a1, a2), or [a1, a2] ?
508 """
509 """
509 if a1 >= a2:
510 if a1 >= a2:
510 return True
511 return True
511 llog = self.linelog
512 llog = self.linelog
512 offset1 = llog.getoffset(a1)
513 offset1 = llog.getoffset(a1)
513 offset2 = llog.getoffset(a2) + int(closedinterval)
514 offset2 = llog.getoffset(a2) + int(closedinterval)
514 linesinbetween = llog.getalllines(offset1, offset2)
515 linesinbetween = llog.getalllines(offset1, offset2)
515 return len(linesinbetween) == a2 - a1 + int(closedinterval)
516 return len(linesinbetween) == a2 - a1 + int(closedinterval)
516
517
517 def _optimizefixups(self, fixups):
518 def _optimizefixups(self, fixups):
518 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
519 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
519 merge adjacent fixups to make them less fragmented.
520 merge adjacent fixups to make them less fragmented.
520 """
521 """
521 result = []
522 result = []
522 pcurrentchunk = [[-1, -1, -1, -1, -1]]
523 pcurrentchunk = [[-1, -1, -1, -1, -1]]
523
524
524 def pushchunk():
525 def pushchunk():
525 if pcurrentchunk[0][0] != -1:
526 if pcurrentchunk[0][0] != -1:
526 result.append(tuple(pcurrentchunk[0]))
527 result.append(tuple(pcurrentchunk[0]))
527
528
528 for i, chunk in enumerate(fixups):
529 for i, chunk in enumerate(fixups):
529 rev, a1, a2, b1, b2 = chunk
530 rev, a1, a2, b1, b2 = chunk
530 lastrev = pcurrentchunk[0][0]
531 lastrev = pcurrentchunk[0][0]
531 lasta2 = pcurrentchunk[0][2]
532 lasta2 = pcurrentchunk[0][2]
532 lastb2 = pcurrentchunk[0][4]
533 lastb2 = pcurrentchunk[0][4]
533 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
534 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
534 self._iscontinuous(max(a1 - 1, 0), a1)):
535 self._iscontinuous(max(a1 - 1, 0), a1)):
535 # merge into currentchunk
536 # merge into currentchunk
536 pcurrentchunk[0][2] = a2
537 pcurrentchunk[0][2] = a2
537 pcurrentchunk[0][4] = b2
538 pcurrentchunk[0][4] = b2
538 else:
539 else:
539 pushchunk()
540 pushchunk()
540 pcurrentchunk[0] = list(chunk)
541 pcurrentchunk[0] = list(chunk)
541 pushchunk()
542 pushchunk()
542 return result
543 return result
543
544
544 def _showchanges(self, alines, blines, chunk, fixups):
545 def _showchanges(self, alines, blines, chunk, fixups):
545 ui = self.ui
546 ui = self.ui
546
547
547 def label(line, label):
548 def label(line, label):
548 if line.endswith('\n'):
549 if line.endswith('\n'):
549 line = line[:-1]
550 line = line[:-1]
550 return ui.label(line, label)
551 return ui.label(line, label)
551
552
552 # this is not optimized for perf but _showchanges only gets executed
553 # this is not optimized for perf but _showchanges only gets executed
553 # with an extra command-line flag.
554 # with an extra command-line flag.
554 a1, a2, b1, b2 = chunk
555 a1, a2, b1, b2 = chunk
555 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
556 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
556 for idx, fa1, fa2, fb1, fb2 in fixups:
557 for idx, fa1, fa2, fb1, fb2 in fixups:
557 for i in xrange(fa1, fa2):
558 for i in pycompat.xrange(fa1, fa2):
558 aidxs[i - a1] = (max(idx, 1) - 1) // 2
559 aidxs[i - a1] = (max(idx, 1) - 1) // 2
559 for i in xrange(fb1, fb2):
560 for i in pycompat.xrange(fb1, fb2):
560 bidxs[i - b1] = (max(idx, 1) - 1) // 2
561 bidxs[i - b1] = (max(idx, 1) - 1) // 2
561
562
562 buf = [] # [(idx, content)]
563 buf = [] # [(idx, content)]
563 buf.append((0, label('@@ -%d,%d +%d,%d @@'
564 buf.append((0, label('@@ -%d,%d +%d,%d @@'
564 % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
565 % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
565 buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
566 buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
566 for i in xrange(a1, a2)]
567 for i in pycompat.xrange(a1, a2)]
567 buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
568 buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
568 for i in xrange(b1, b2)]
569 for i in pycompat.xrange(b1, b2)]
569 for idx, line in buf:
570 for idx, line in buf:
570 shortnode = idx and node.short(self.fctxs[idx].node()) or ''
571 shortnode = idx and node.short(self.fctxs[idx].node()) or ''
571 ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
572 ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
572 line + '\n')
573 line + '\n')
573
574
574 class fixupstate(object):
575 class fixupstate(object):
575 """state needed to run absorb
576 """state needed to run absorb
576
577
577 internally, it keeps paths and filefixupstates.
578 internally, it keeps paths and filefixupstates.
578
579
579 a typical use is like filefixupstates:
580 a typical use is like filefixupstates:
580
581
581 1. call diffwith, to calculate fixups
582 1. call diffwith, to calculate fixups
582 2. (optionally), present fixups to the user, or edit fixups
583 2. (optionally), present fixups to the user, or edit fixups
583 3. call apply, to apply changes to memory
584 3. call apply, to apply changes to memory
584 4. call commit, to commit changes to hg database
585 4. call commit, to commit changes to hg database
585 """
586 """
586
587
587 def __init__(self, stack, ui=None, opts=None):
588 def __init__(self, stack, ui=None, opts=None):
588 """([ctx], ui or None) -> None
589 """([ctx], ui or None) -> None
589
590
590 stack: should be linear, and sorted by topo order - oldest first.
591 stack: should be linear, and sorted by topo order - oldest first.
591 all commits in stack are considered mutable.
592 all commits in stack are considered mutable.
592 """
593 """
593 assert stack
594 assert stack
594 self.ui = ui or nullui()
595 self.ui = ui or nullui()
595 self.opts = opts or {}
596 self.opts = opts or {}
596 self.stack = stack
597 self.stack = stack
597 self.repo = stack[-1].repo().unfiltered()
598 self.repo = stack[-1].repo().unfiltered()
598
599
599 # following fields will be filled later
600 # following fields will be filled later
600 self.paths = [] # [str]
601 self.paths = [] # [str]
601 self.status = None # ctx.status output
602 self.status = None # ctx.status output
602 self.fctxmap = {} # {path: {ctx: fctx}}
603 self.fctxmap = {} # {path: {ctx: fctx}}
603 self.fixupmap = {} # {path: filefixupstate}
604 self.fixupmap = {} # {path: filefixupstate}
604 self.replacemap = {} # {oldnode: newnode or None}
605 self.replacemap = {} # {oldnode: newnode or None}
605 self.finalnode = None # head after all fixups
606 self.finalnode = None # head after all fixups
606
607
607 def diffwith(self, targetctx, match=None, showchanges=False):
608 def diffwith(self, targetctx, match=None, showchanges=False):
608 """diff and prepare fixups. update self.fixupmap, self.paths"""
609 """diff and prepare fixups. update self.fixupmap, self.paths"""
609 # only care about modified files
610 # only care about modified files
610 self.status = self.stack[-1].status(targetctx, match)
611 self.status = self.stack[-1].status(targetctx, match)
611 self.paths = []
612 self.paths = []
612 # but if --edit-lines is used, the user may want to edit files
613 # but if --edit-lines is used, the user may want to edit files
613 # even if they are not modified
614 # even if they are not modified
614 editopt = self.opts.get('edit_lines')
615 editopt = self.opts.get('edit_lines')
615 if not self.status.modified and editopt and match:
616 if not self.status.modified and editopt and match:
616 interestingpaths = match.files()
617 interestingpaths = match.files()
617 else:
618 else:
618 interestingpaths = self.status.modified
619 interestingpaths = self.status.modified
619 # prepare the filefixupstate
620 # prepare the filefixupstate
620 seenfctxs = set()
621 seenfctxs = set()
621 # sorting is necessary to eliminate ambiguity for the "double move"
622 # sorting is necessary to eliminate ambiguity for the "double move"
622 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
623 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
623 for path in sorted(interestingpaths):
624 for path in sorted(interestingpaths):
624 if self.ui.debugflag:
625 if self.ui.debugflag:
625 self.ui.write(_('calculating fixups for %s\n') % path)
626 self.ui.write(_('calculating fixups for %s\n') % path)
626 targetfctx = targetctx[path]
627 targetfctx = targetctx[path]
627 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
628 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
628 # ignore symbolic links or binary, or unchanged files
629 # ignore symbolic links or binary, or unchanged files
629 if any(f.islink() or stringutil.binary(f.data())
630 if any(f.islink() or stringutil.binary(f.data())
630 for f in [targetfctx] + fctxs
631 for f in [targetfctx] + fctxs
631 if not isinstance(f, emptyfilecontext)):
632 if not isinstance(f, emptyfilecontext)):
632 continue
633 continue
633 if targetfctx.data() == fctxs[-1].data() and not editopt:
634 if targetfctx.data() == fctxs[-1].data() and not editopt:
634 continue
635 continue
635 seenfctxs.update(fctxs[1:])
636 seenfctxs.update(fctxs[1:])
636 self.fctxmap[path] = ctx2fctx
637 self.fctxmap[path] = ctx2fctx
637 fstate = filefixupstate(fctxs, ui=self.ui, opts=self.opts)
638 fstate = filefixupstate(fctxs, ui=self.ui, opts=self.opts)
638 if showchanges:
639 if showchanges:
639 colorpath = self.ui.label(path, 'absorb.path')
640 colorpath = self.ui.label(path, 'absorb.path')
640 header = 'showing changes for ' + colorpath
641 header = 'showing changes for ' + colorpath
641 self.ui.write(header + '\n')
642 self.ui.write(header + '\n')
642 fstate.diffwith(targetfctx, showchanges=showchanges)
643 fstate.diffwith(targetfctx, showchanges=showchanges)
643 self.fixupmap[path] = fstate
644 self.fixupmap[path] = fstate
644 self.paths.append(path)
645 self.paths.append(path)
645
646
646 def apply(self):
647 def apply(self):
647 """apply fixups to individual filefixupstates"""
648 """apply fixups to individual filefixupstates"""
648 for path, state in self.fixupmap.iteritems():
649 for path, state in self.fixupmap.iteritems():
649 if self.ui.debugflag:
650 if self.ui.debugflag:
650 self.ui.write(_('applying fixups to %s\n') % path)
651 self.ui.write(_('applying fixups to %s\n') % path)
651 state.apply()
652 state.apply()
652
653
653 @property
654 @property
654 def chunkstats(self):
655 def chunkstats(self):
655 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
656 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
656 return dict((path, state.chunkstats)
657 return dict((path, state.chunkstats)
657 for path, state in self.fixupmap.iteritems())
658 for path, state in self.fixupmap.iteritems())
658
659
659 def commit(self):
660 def commit(self):
660 """commit changes. update self.finalnode, self.replacemap"""
661 """commit changes. update self.finalnode, self.replacemap"""
661 with self.repo.wlock(), self.repo.lock():
662 with self.repo.wlock(), self.repo.lock():
662 with self.repo.transaction('absorb') as tr:
663 with self.repo.transaction('absorb') as tr:
663 self._commitstack()
664 self._commitstack()
664 self._movebookmarks(tr)
665 self._movebookmarks(tr)
665 if self.repo['.'].node() in self.replacemap:
666 if self.repo['.'].node() in self.replacemap:
666 self._moveworkingdirectoryparent()
667 self._moveworkingdirectoryparent()
667 if self._useobsolete:
668 if self._useobsolete:
668 self._obsoleteoldcommits()
669 self._obsoleteoldcommits()
669 if not self._useobsolete: # strip must be outside transactions
670 if not self._useobsolete: # strip must be outside transactions
670 self._stripoldcommits()
671 self._stripoldcommits()
671 return self.finalnode
672 return self.finalnode
672
673
673 def printchunkstats(self):
674 def printchunkstats(self):
674 """print things like '1 of 2 chunk(s) applied'"""
675 """print things like '1 of 2 chunk(s) applied'"""
675 ui = self.ui
676 ui = self.ui
676 chunkstats = self.chunkstats
677 chunkstats = self.chunkstats
677 if ui.verbose:
678 if ui.verbose:
678 # chunkstats for each file
679 # chunkstats for each file
679 for path, stat in chunkstats.iteritems():
680 for path, stat in chunkstats.iteritems():
680 if stat[0]:
681 if stat[0]:
681 ui.write(_('%s: %d of %d chunk(s) applied\n')
682 ui.write(_('%s: %d of %d chunk(s) applied\n')
682 % (path, stat[0], stat[1]))
683 % (path, stat[0], stat[1]))
683 elif not ui.quiet:
684 elif not ui.quiet:
684 # a summary for all files
685 # a summary for all files
685 stats = chunkstats.values()
686 stats = chunkstats.values()
686 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
687 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
687 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
688 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
688
689
689 def _commitstack(self):
690 def _commitstack(self):
690 """make new commits. update self.finalnode, self.replacemap.
691 """make new commits. update self.finalnode, self.replacemap.
691 it is splitted from "commit" to avoid too much indentation.
692 it is splitted from "commit" to avoid too much indentation.
692 """
693 """
693 # last node (20-char) committed by us
694 # last node (20-char) committed by us
694 lastcommitted = None
695 lastcommitted = None
695 # p1 which overrides the parent of the next commit, "None" means use
696 # p1 which overrides the parent of the next commit, "None" means use
696 # the original parent unchanged
697 # the original parent unchanged
697 nextp1 = None
698 nextp1 = None
698 for ctx in self.stack:
699 for ctx in self.stack:
699 memworkingcopy = self._getnewfilecontents(ctx)
700 memworkingcopy = self._getnewfilecontents(ctx)
700 if not memworkingcopy and not lastcommitted:
701 if not memworkingcopy and not lastcommitted:
701 # nothing changed, nothing commited
702 # nothing changed, nothing commited
702 nextp1 = ctx
703 nextp1 = ctx
703 continue
704 continue
704 msg = ''
705 msg = ''
705 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
706 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
706 # changeset is no longer necessary
707 # changeset is no longer necessary
707 self.replacemap[ctx.node()] = None
708 self.replacemap[ctx.node()] = None
708 msg = _('became empty and was dropped')
709 msg = _('became empty and was dropped')
709 else:
710 else:
710 # changeset needs re-commit
711 # changeset needs re-commit
711 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
712 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
712 lastcommitted = self.repo[nodestr]
713 lastcommitted = self.repo[nodestr]
713 nextp1 = lastcommitted
714 nextp1 = lastcommitted
714 self.replacemap[ctx.node()] = lastcommitted.node()
715 self.replacemap[ctx.node()] = lastcommitted.node()
715 if memworkingcopy:
716 if memworkingcopy:
716 msg = _('%d file(s) changed, became %s') % (
717 msg = _('%d file(s) changed, became %s') % (
717 len(memworkingcopy), self._ctx2str(lastcommitted))
718 len(memworkingcopy), self._ctx2str(lastcommitted))
718 else:
719 else:
719 msg = _('became %s') % self._ctx2str(lastcommitted)
720 msg = _('became %s') % self._ctx2str(lastcommitted)
720 if self.ui.verbose and msg:
721 if self.ui.verbose and msg:
721 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
722 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
722 self.finalnode = lastcommitted and lastcommitted.node()
723 self.finalnode = lastcommitted and lastcommitted.node()
723
724
724 def _ctx2str(self, ctx):
725 def _ctx2str(self, ctx):
725 if self.ui.debugflag:
726 if self.ui.debugflag:
726 return ctx.hex()
727 return ctx.hex()
727 else:
728 else:
728 return node.short(ctx.node())
729 return node.short(ctx.node())
729
730
730 def _getnewfilecontents(self, ctx):
731 def _getnewfilecontents(self, ctx):
731 """(ctx) -> {path: str}
732 """(ctx) -> {path: str}
732
733
733 fetch file contents from filefixupstates.
734 fetch file contents from filefixupstates.
734 return the working copy overrides - files different from ctx.
735 return the working copy overrides - files different from ctx.
735 """
736 """
736 result = {}
737 result = {}
737 for path in self.paths:
738 for path in self.paths:
738 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
739 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
739 if ctx not in ctx2fctx:
740 if ctx not in ctx2fctx:
740 continue
741 continue
741 fctx = ctx2fctx[ctx]
742 fctx = ctx2fctx[ctx]
742 content = fctx.data()
743 content = fctx.data()
743 newcontent = self.fixupmap[path].getfinalcontent(fctx)
744 newcontent = self.fixupmap[path].getfinalcontent(fctx)
744 if content != newcontent:
745 if content != newcontent:
745 result[fctx.path()] = newcontent
746 result[fctx.path()] = newcontent
746 return result
747 return result
747
748
748 def _movebookmarks(self, tr):
749 def _movebookmarks(self, tr):
749 repo = self.repo
750 repo = self.repo
750 needupdate = [(name, self.replacemap[hsh])
751 needupdate = [(name, self.replacemap[hsh])
751 for name, hsh in repo._bookmarks.iteritems()
752 for name, hsh in repo._bookmarks.iteritems()
752 if hsh in self.replacemap]
753 if hsh in self.replacemap]
753 changes = []
754 changes = []
754 for name, hsh in needupdate:
755 for name, hsh in needupdate:
755 if hsh:
756 if hsh:
756 changes.append((name, hsh))
757 changes.append((name, hsh))
757 if self.ui.verbose:
758 if self.ui.verbose:
758 self.ui.write(_('moving bookmark %s to %s\n')
759 self.ui.write(_('moving bookmark %s to %s\n')
759 % (name, node.hex(hsh)))
760 % (name, node.hex(hsh)))
760 else:
761 else:
761 changes.append((name, None))
762 changes.append((name, None))
762 if self.ui.verbose:
763 if self.ui.verbose:
763 self.ui.write(_('deleting bookmark %s\n') % name)
764 self.ui.write(_('deleting bookmark %s\n') % name)
764 repo._bookmarks.applychanges(repo, tr, changes)
765 repo._bookmarks.applychanges(repo, tr, changes)
765
766
766 def _moveworkingdirectoryparent(self):
767 def _moveworkingdirectoryparent(self):
767 if not self.finalnode:
768 if not self.finalnode:
768 # Find the latest not-{obsoleted,stripped} parent.
769 # Find the latest not-{obsoleted,stripped} parent.
769 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
770 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
770 ctx = self.repo[revs.first()]
771 ctx = self.repo[revs.first()]
771 self.finalnode = ctx.node()
772 self.finalnode = ctx.node()
772 else:
773 else:
773 ctx = self.repo[self.finalnode]
774 ctx = self.repo[self.finalnode]
774
775
775 dirstate = self.repo.dirstate
776 dirstate = self.repo.dirstate
776 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
777 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
777 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
778 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
778 noop = lambda: 0
779 noop = lambda: 0
779 restore = noop
780 restore = noop
780 if util.safehasattr(dirstate, '_fsmonitorstate'):
781 if util.safehasattr(dirstate, '_fsmonitorstate'):
781 bak = dirstate._fsmonitorstate.invalidate
782 bak = dirstate._fsmonitorstate.invalidate
782 def restore():
783 def restore():
783 dirstate._fsmonitorstate.invalidate = bak
784 dirstate._fsmonitorstate.invalidate = bak
784 dirstate._fsmonitorstate.invalidate = noop
785 dirstate._fsmonitorstate.invalidate = noop
785 try:
786 try:
786 with dirstate.parentchange():
787 with dirstate.parentchange():
787 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
788 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
788 finally:
789 finally:
789 restore()
790 restore()
790
791
791 @staticmethod
792 @staticmethod
792 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
793 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
793 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
794 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
794
795
795 if it will become an empty commit (does not change anything, after the
796 if it will become an empty commit (does not change anything, after the
796 memworkingcopy overrides), return True. otherwise return False.
797 memworkingcopy overrides), return True. otherwise return False.
797 """
798 """
798 if not pctx:
799 if not pctx:
799 parents = ctx.parents()
800 parents = ctx.parents()
800 if len(parents) != 1:
801 if len(parents) != 1:
801 return False
802 return False
802 pctx = parents[0]
803 pctx = parents[0]
803 # ctx changes more files (not a subset of memworkingcopy)
804 # ctx changes more files (not a subset of memworkingcopy)
804 if not set(ctx.files()).issubset(set(memworkingcopy.iterkeys())):
805 if not set(ctx.files()).issubset(set(memworkingcopy.iterkeys())):
805 return False
806 return False
806 for path, content in memworkingcopy.iteritems():
807 for path, content in memworkingcopy.iteritems():
807 if path not in pctx or path not in ctx:
808 if path not in pctx or path not in ctx:
808 return False
809 return False
809 fctx = ctx[path]
810 fctx = ctx[path]
810 pfctx = pctx[path]
811 pfctx = pctx[path]
811 if pfctx.flags() != fctx.flags():
812 if pfctx.flags() != fctx.flags():
812 return False
813 return False
813 if pfctx.data() != content:
814 if pfctx.data() != content:
814 return False
815 return False
815 return True
816 return True

    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """(ctx, {path: content}, node) -> node. make a single commit

        the commit is a clone from ctx, with a (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'addnoise'):
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        # preserve phase
        with mctx.repo().ui.configoverride({
                ('phases', 'new-commit'): ctx.phase()}):
            return mctx.commit()
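    # Note on ``parents = p1 and (p1, node.nullid)`` above: when ``p1`` is
    # None the expression evaluates to None, so overlaycontext falls back to
    # the parents of ``ctx``; when a replacement parent is given, the new
    # commit is parented on ``(p1, nullid)``. A rough sketch of the pattern:
    #
    #     parents = newp1 and (newp1, node.nullid)  # None -> keep ctx parents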

    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _obsoleteoldcommits(self):
        relations = [(self.repo[k], v and (self.repo[v],) or ())
                     for k, v in self.replacemap.iteritems()]
        if relations:
            obsolete.createmarkers(self.repo, relations)
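    # Each relation pairs an old changeset with a tuple of successors; an
    # empty tuple means the old changeset became empty and is recorded as
    # pruned. Illustrative shape of ``relations``:
    #
    #     [(oldctx1, (newctx1,)),   # rewritten
    #      (oldctx2, ())]           # absorbed away entirely, pruned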

    def _stripoldcommits(self):
        nodelist = self.replacemap.keys()
        # make sure we don't strip innocent children
        revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
                              nodelist, nodelist)
        tonode = self.repo.changelog.node
        nodelist = [tonode(r) for r in revs]
        if nodelist:
            repair.strip(self.repo.ui, self.repo, nodelist)
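    # Reading the revset: ``heads(%ln::) - %ln`` finds descendant heads that
    # are not themselves being replaced; ``::(...)`` expands to the ancestors
    # of those heads, and subtracting that set keeps any replaced changeset
    # that still has an unrelated child, so only safe-to-strip nodes remain.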

def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)
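# Worked example (hypothetical numbers): for a hunk ``@@ -8,4 +8,5 @@`` with
# two leading context lines, no trailing context, that replaces old lines
# 10-11 with three new lines: ``fromline`` is 8 and ``len(before)`` is 2, so
# ``a1`` is 9 (the 0-based index of old line 10), ``removed`` is 2, ``a2`` is
# 11, and ``blines`` holds the three replacement lines. ``(a1, a2, blines)``
# therefore means "splice ``blines`` over ``lines[a1:a2]``".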

def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    patchmap = collections.defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)
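# Why ``patches.sort(reverse=True)`` matters: splices are applied from the
# bottom of the file upwards, so smaller a1/a2 indices stay valid after later
# regions change length. A minimal standalone sketch of the same splice
# (illustrative values only):
#
#     lines = ['1\n', '2\n', '3\n', '4\n']
#     for a1, a2, blines in sorted(
#             [(0, 1, ['one\n']), (2, 4, ['three\n'])], reverse=True):
#         lines[a1:a2] = blines
#     # lines == ['one\n', '2\n', 'three\n']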

def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'maxstacksize')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    state.diffwith(targetctx, matcher, showchanges=opts.get('print_changes'))
    if not opts.get('dry_run'):
        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state
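# absorb() can also be called programmatically by another extension. A rough
# sketch (the opts keys mirror the command-line flags; the path and values
# below are illustrative only):
#
#     state = absorb(ui, repo, pats=('path/to/file.py',),
#                    opts={'print_changes': True, 'dry_run': True})
#     for path, (adopted, total) in state.chunkstats.items():
#         ui.write('%s: %d of %d chunks absorbable\n'
#                  % (path, adopted, total))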

@command('^absorb|sf',
         [('p', 'print-changes', None,
           _('print which changesets are modified by which changes')),
          ('i', 'interactive', None,
           _('interactively select which chunks to apply (EXPERIMENTAL)')),
          ('e', 'edit-lines', None,
           _('edit what lines belong to which changesets before commit '
             '(EXPERIMENTAL)')),
         ] + commands.dryrunopts + commands.walkopts,
         _('hg absorb [OPTION] [FILE]...'))
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. It can be
    observed by :hg:`status` or :hg:`diff` afterwards. In other words,
    absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    If in doubt, run :hg:`absorb -pn` to preview what changesets will
    be amended by what changed lines, without actually changing anything.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    state = absorb(ui, repo, pats=pats, opts=opts)
    if sum(s[0] for s in state.chunkstats.values()) == 0:
        return 1
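# Typical invocations, for illustration (flag names come from the command
# table above; ``-n``/``--dry-run`` is contributed by commands.dryrunopts):
#
#     hg absorb          # absorb working-directory changes into the stack
#     hg absorb -pn      # preview which changesets would be amended
#     hg absorb -i       # interactively pick which chunks to absorb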

def _wrapamend(flag):
    """add flag to amend, which will be a shortcut to the absorb command"""
    if not flag:
        return
    amendcmd = extensions.bind(_amendcmd, flag)
    # the amend command can exist in evolve, or fbamend
    for extname in ['evolve', 'fbamend', None]:
        try:
            if extname is None:
                cmdtable = commands.table
            else:
                ext = extensions.find(extname)
                cmdtable = ext.cmdtable
        except (KeyError, AttributeError):
            continue
        try:
            entry = extensions.wrapcommand(cmdtable, 'amend', amendcmd)
            options = entry[1]
            msg = _('incorporate corrections into stack. '
                    'see \'hg help absorb\' for details')
            options.append(('', flag, None, msg))
            return
        except error.UnknownCommand:
            pass
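# Effect of the wrapper, for illustration: if the configured flag name were,
# say, ``fixup`` (a hypothetical value), ``hg amend --fixup`` would route to
# absorb via _amendcmd below, while a plain ``hg amend`` keeps its original
# behaviour.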

def _amendcmd(flag, orig, ui, repo, *pats, **opts):
    if not opts.get(flag):
        return orig(ui, repo, *pats, **opts)
    # use absorb
    for k, v in opts.iteritems(): # check unsupported flags
        if v and k not in ['interactive', flag]:
            raise error.Abort(_('--%s does not support --%s')
                              % (flag, k.replace('_', '-')))
    state = absorb(ui, repo, pats=pats, opts=opts)
    # different from the original absorb, tell users what chunks were
    # ignored and were left. it's because users usually expect "amend" to
    # take all of their changes and will feel strange otherwise.
    # the original "absorb" command faces more-advanced users knowing
    # what's going on and is less verbose.
    adoptedsum = 0
    messages = []
    for path, (adopted, total) in state.chunkstats.iteritems():
        adoptedsum += adopted
        if adopted == total:
            continue
        reason = _('%d modified chunks were ignored') % (total - adopted)
        messages.append(('M', 'modified', path, reason))
    for idx, word, symbol in [(0, 'modified', 'M'), (1, 'added', 'A'),
                              (2, 'removed', 'R'), (3, 'deleted', '!')]:
        paths = set(state.status[idx]) - set(state.paths)
        for path in sorted(paths):
            if word == 'modified':
                reason = _('unsupported file type (ex. binary or link)')
            else:
                reason = _('%s files were ignored') % word
            messages.append((symbol, word, path, reason))
    if messages:
        ui.write(_('\n# changes not applied and left in '
                   'working directory:\n'))
        for symbol, word, path, reason in messages:
            ui.write(_('# %s %s : %s\n') % (
                ui.label(symbol, 'status.' + word),
                ui.label(path, 'status.' + word), reason))

    if adoptedsum == 0:
        return 1

def extsetup(ui):
    _wrapamend(ui.config('absorb', 'amendflag'))