##// END OF EJS Templates
absorb: note some TODOs from the code review...
Augie Fackler -
r38958:ec0697f4 default
parent child Browse files
Show More
@@ -1,1043 +1,1048
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 maxstacksize = 50
17 maxstacksize = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 addnoise = 1
19 addnoise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amendflag = correlated
21 amendflag = correlated
22
22
23 [color]
23 [color]
24 absorb.node = blue bold
24 absorb.node = blue bold
25 absorb.path = bold
25 absorb.path = bold
26 """
26 """
27
27
28 # TODO:
29 # * Rename config items to [commands] namespace
30 # * Converge getdraftstack() with other code in core
31 # * move many attributes on fixupstate to be private
32
28 from __future__ import absolute_import
33 from __future__ import absolute_import
29
34
30 import collections
35 import collections
31
36
32 from mercurial.i18n import _
37 from mercurial.i18n import _
33 from mercurial import (
38 from mercurial import (
34 cmdutil,
39 cmdutil,
35 commands,
40 commands,
36 context,
41 context,
37 crecord,
42 crecord,
38 error,
43 error,
39 extensions,
44 extensions,
40 linelog,
45 linelog,
41 mdiff,
46 mdiff,
42 node,
47 node,
43 obsolete,
48 obsolete,
44 patch,
49 patch,
45 phases,
50 phases,
46 pycompat,
51 pycompat,
47 registrar,
52 registrar,
48 repair,
53 repair,
49 scmutil,
54 scmutil,
50 util,
55 util,
51 )
56 )
52 from mercurial.utils import (
57 from mercurial.utils import (
53 stringutil,
58 stringutil,
54 )
59 )
55
60
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# command table, populated by the @command decorator via registrar
cmdtable = {}
command = registrar.command(cmdtable)

# config table, populated by configitem() calls below; declares the
# extension's config knobs and their defaults
configtable = {}
configitem = registrar.configitem(configtable)

# whether to add noise to new commits to avoid obsolescence cycles
configitem('absorb', 'addnoise', default=True)
# flag name that makes `amend --<flag>` a shortcut to the absorb command
configitem('absorb', 'amendflag', default=None)
# only check this many recent non-public changesets at most
configitem('absorb', 'maxstacksize', default=50)

# default colors for absorb's labeled output (overridable via [color])
colortable = {
    'absorb.node': 'blue bold',
    'absorb.path': 'bold',
}

# local alias used throughout this module
defaultdict = collections.defaultdict
78
83
class nullui(object):
    """blank ui object doing nothing

    Stands in when no real ui is supplied so callers can invoke
    arbitrary ui operations without None checks; every looked-up
    operation is a no-op function returning None.
    """
    debugflag = False
    verbose = False
    quiet = True

    def __getitem__(self, name):
        # fix: the original signature was __getitem__(name), which was
        # missing the explicit self parameter (self was bound to "name").
        def nullfunc(*args, **kwds):
            return
        return nullfunc

    # ui operations are normally reached via attribute access
    # (ui.write, ui.edit, ...); make those no-ops as well instead of
    # raising AttributeError. Class attributes above are still found by
    # normal lookup, so __getattr__ never shadows them.
    __getattr__ = __getitem__
89
94
90 class emptyfilecontext(object):
95 class emptyfilecontext(object):
91 """minimal filecontext representing an empty file"""
96 """minimal filecontext representing an empty file"""
92 def data(self):
97 def data(self):
93 return ''
98 return ''
94
99
95 def node(self):
100 def node(self):
96 return node.nullid
101 return node.nullid
97
102
def uniq(lst):
    """list -> list. drop duplicate items, keeping first occurrences in order"""
    witnessed = set()
    deduped = []
    for item in lst:
        if item in witnessed:
            continue
        witnessed.add(item)
        deduped.append(item)
    return deduped
107
112
def getdraftstack(headctx, limit=None):
    """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.

    changesets are sorted in topo order, oldest first.
    return at most limit items, if limit is a positive number.

    merges are considered as non-draft as well. i.e. every commit
    returned has and only has 1 parent.
    """
    stack = []
    current = headctx
    # walk first parents until we hit a public changeset, a merge, or
    # the size limit
    while current.phase() != phases.public:
        if limit and len(stack) >= limit:
            break
        ps = current.parents()
        if len(ps) != 1:
            # stop at merges: they cannot be linearly absorbed into
            break
        stack.append(current)
        current = ps[0]
    # collected newest-first; callers expect oldest-first
    return stack[::-1]
129
134
def getfilestack(stack, path, seenfctxs=None):
    """([ctx], str, set) -> [fctx], {ctx: fctx}

    stack is a list of contexts, from old to new. usually they are what
    "getdraftstack" returns.

    follows renames, but not copies.

    seenfctxs is a set of filecontexts that will be considered "immutable".
    they are usually what this function returned in earlier calls, useful
    to avoid issues that a file was "moved" to multiple places and was then
    modified differently, like: "a" was copied to "b", "a" was also copied to
    "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
    and we enforce only one of them to be able to affect "a"'s content.

    return an empty list and an empty dict, if the specified path does not
    exist in stack[-1] (the top of the stack).

    otherwise, return a list of de-duplicated filecontexts, and the map to
    convert ctx in the stack to fctx, for possible mutable fctxs. the first item
    of the list would be outside the stack and should be considered immutable.
    the remaining items are within the stack.

    for example, given the following changelog and corresponding filelog
    revisions:

      changelog: 3----4----5----6----7
      filelog:   x    0----1----1----2 (x: no such file yet)

    - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
    - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
      dummy empty filecontext.
    - if stack = [2], returns ([], {})
    - if stack = [7], returns ([1, 2], {7: 2})
    - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
      removed, since 1 is immutable.
    """
    if seenfctxs is None:
        seenfctxs = set()
    assert stack

    if path not in stack[-1]:
        return [], {}

    fctxs = []
    fctxmap = {}

    # walk newest -> oldest; pctx tracks which context should contribute
    # the extra immutable base fctx once the walk stops
    pctx = stack[0].p1() # the public (immutable) ctx we stop at
    for ctx in reversed(stack):
        if path not in ctx: # the file is added in the next commit
            pctx = ctx
            break
        fctx = ctx[path]
        fctxs.append(fctx)
        if fctx in seenfctxs: # treat fctx as the immutable one
            pctx = None # do not add another immutable fctx
            break
        fctxmap[ctx] = fctx # only for mutable fctxs
        renamed = fctx.renamed()
        if renamed:
            # note: path is reassigned here so older contexts in the
            # remaining iterations are looked up under the old name
            path = renamed[0] # follow rename
            if path in ctx: # but do not follow copy
                pctx = ctx.p1()
                break

    if pctx is not None: # need an extra immutable fctx
        if path in pctx:
            fctxs.append(pctx[path])
        else:
            fctxs.append(emptyfilecontext())

    # collected newest-first above; return oldest-first
    fctxs.reverse()
    # note: we rely on a property of hg: filerev is not reused for linear
    # history. i.e. it's impossible to have:
    #   changelog: 4----5----6 (linear, no merges)
    #   filelog:   1----2----1
    #                         ^ reuse filerev (impossible)
    # because parents are part of the hash. if that's not true, we need to
    # remove uniq and find a different way to identify fctxs.
    return uniq(fctxs), fctxmap
210
215
class overlaystore(patch.filestore):
    """read-only, hybrid store based on a dict and ctx.
    memworkingcopy: {path: content}, overrides file contents.
    """

    def __init__(self, basectx, memworkingcopy):
        self.basectx = basectx
        self.memworkingcopy = memworkingcopy

    def getfile(self, path):
        """comply with mercurial.patch.filestore.getfile"""
        base = self.basectx
        if path not in base:
            return None, None, None
        fctx = base[path]
        # prefer the in-memory override when one exists for this path
        try:
            content = self.memworkingcopy[path]
        except KeyError:
            content = fctx.data()
        mode = (fctx.islink(), fctx.isexec())
        renamed = fctx.renamed() # False or (path, node)
        return content, mode, (renamed and renamed[0])
231
236
def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
    """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx

    memworkingcopy overrides file contents; every other piece of metadata
    (description, user, date, extra) is copied from ctx unless explicitly
    overridden via the parents/extra arguments.
    """
    # parents must contain 2 items: (node1, node2)
    if parents is None:
        parents = ctx.repo().changelog.parents(ctx.node())
    if extra is None:
        extra = ctx.extra()
    date = ctx.date()
    desc = ctx.description()
    user = ctx.user()
    # fix: iterating the dict directly replaces the Python-2-only
    # iterkeys(); behavior is identical on py2 and works on py3
    files = set(ctx.files()).union(memworkingcopy)
    store = overlaystore(ctx, memworkingcopy)
    return context.memctx(
        repo=ctx.repo(), parents=parents, text=desc,
        files=files, filectxfn=store, user=user, date=date,
        branch=None, extra=extra)
250
255
251 class filefixupstate(object):
256 class filefixupstate(object):
252 """state needed to apply fixups to a single file
257 """state needed to apply fixups to a single file
253
258
254 internally, it keeps file contents of several revisions and a linelog.
259 internally, it keeps file contents of several revisions and a linelog.
255
260
256 the linelog uses odd revision numbers for original contents (fctxs passed
261 the linelog uses odd revision numbers for original contents (fctxs passed
257 to __init__), and even revision numbers for fixups, like:
262 to __init__), and even revision numbers for fixups, like:
258
263
259 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
264 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
260 linelog rev 2: fixups made to self.fctxs[0]
265 linelog rev 2: fixups made to self.fctxs[0]
261 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
266 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
262 linelog rev 4: fixups made to self.fctxs[1]
267 linelog rev 4: fixups made to self.fctxs[1]
263 ...
268 ...
264
269
265 a typical use is like:
270 a typical use is like:
266
271
267 1. call diffwith, to calculate self.fixups
272 1. call diffwith, to calculate self.fixups
268 2. (optionally), present self.fixups to the user, or change it
273 2. (optionally), present self.fixups to the user, or change it
269 3. call apply, to apply changes
274 3. call apply, to apply changes
270 4. read results from "finalcontents", or call getfinalcontent
275 4. read results from "finalcontents", or call getfinalcontent
271 """
276 """
272
277
273 def __init__(self, fctxs, ui=None, opts=None):
278 def __init__(self, fctxs, ui=None, opts=None):
274 """([fctx], ui or None) -> None
279 """([fctx], ui or None) -> None
275
280
276 fctxs should be linear, and sorted by topo order - oldest first.
281 fctxs should be linear, and sorted by topo order - oldest first.
277 fctxs[0] will be considered as "immutable" and will not be changed.
282 fctxs[0] will be considered as "immutable" and will not be changed.
278 """
283 """
279 self.fctxs = fctxs
284 self.fctxs = fctxs
280 self.ui = ui or nullui()
285 self.ui = ui or nullui()
281 self.opts = opts or {}
286 self.opts = opts or {}
282
287
283 # following fields are built from fctxs. they exist for perf reason
288 # following fields are built from fctxs. they exist for perf reason
284 self.contents = [f.data() for f in fctxs]
289 self.contents = [f.data() for f in fctxs]
285 self.contentlines = map(mdiff.splitnewlines, self.contents)
290 self.contentlines = map(mdiff.splitnewlines, self.contents)
286 self.linelog = self._buildlinelog()
291 self.linelog = self._buildlinelog()
287 if self.ui.debugflag:
292 if self.ui.debugflag:
288 assert self._checkoutlinelog() == self.contents
293 assert self._checkoutlinelog() == self.contents
289
294
290 # following fields will be filled later
295 # following fields will be filled later
291 self.chunkstats = [0, 0] # [adopted, total : int]
296 self.chunkstats = [0, 0] # [adopted, total : int]
292 self.targetlines = [] # [str]
297 self.targetlines = [] # [str]
293 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
298 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
294 self.finalcontents = [] # [str]
299 self.finalcontents = [] # [str]
295
300
296 def diffwith(self, targetfctx, showchanges=False):
301 def diffwith(self, targetfctx, showchanges=False):
297 """calculate fixups needed by examining the differences between
302 """calculate fixups needed by examining the differences between
298 self.fctxs[-1] and targetfctx, chunk by chunk.
303 self.fctxs[-1] and targetfctx, chunk by chunk.
299
304
300 targetfctx is the target state we move towards. we may or may not be
305 targetfctx is the target state we move towards. we may or may not be
301 able to get there because not all modified chunks can be amended into
306 able to get there because not all modified chunks can be amended into
302 a non-public fctx unambiguously.
307 a non-public fctx unambiguously.
303
308
304 call this only once, before apply().
309 call this only once, before apply().
305
310
306 update self.fixups, self.chunkstats, and self.targetlines.
311 update self.fixups, self.chunkstats, and self.targetlines.
307 """
312 """
308 a = self.contents[-1]
313 a = self.contents[-1]
309 alines = self.contentlines[-1]
314 alines = self.contentlines[-1]
310 b = targetfctx.data()
315 b = targetfctx.data()
311 blines = mdiff.splitnewlines(b)
316 blines = mdiff.splitnewlines(b)
312 self.targetlines = blines
317 self.targetlines = blines
313
318
314 self.linelog.annotate(self.linelog.maxrev)
319 self.linelog.annotate(self.linelog.maxrev)
315 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
320 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
316 assert len(annotated) == len(alines)
321 assert len(annotated) == len(alines)
317 # add a dummy end line to make insertion at the end easier
322 # add a dummy end line to make insertion at the end easier
318 if annotated:
323 if annotated:
319 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
324 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
320 annotated.append(dummyendline)
325 annotated.append(dummyendline)
321
326
322 # analyse diff blocks
327 # analyse diff blocks
323 for chunk in self._alldiffchunks(a, b, alines, blines):
328 for chunk in self._alldiffchunks(a, b, alines, blines):
324 newfixups = self._analysediffchunk(chunk, annotated)
329 newfixups = self._analysediffchunk(chunk, annotated)
325 self.chunkstats[0] += bool(newfixups) # 1 or 0
330 self.chunkstats[0] += bool(newfixups) # 1 or 0
326 self.chunkstats[1] += 1
331 self.chunkstats[1] += 1
327 self.fixups += newfixups
332 self.fixups += newfixups
328 if showchanges:
333 if showchanges:
329 self._showchanges(alines, blines, chunk, newfixups)
334 self._showchanges(alines, blines, chunk, newfixups)
330
335
331 def apply(self):
336 def apply(self):
332 """apply self.fixups. update self.linelog, self.finalcontents.
337 """apply self.fixups. update self.linelog, self.finalcontents.
333
338
334 call this only once, before getfinalcontent(), after diffwith().
339 call this only once, before getfinalcontent(), after diffwith().
335 """
340 """
336 # the following is unnecessary, as it's done by "diffwith":
341 # the following is unnecessary, as it's done by "diffwith":
337 # self.linelog.annotate(self.linelog.maxrev)
342 # self.linelog.annotate(self.linelog.maxrev)
338 for rev, a1, a2, b1, b2 in reversed(self.fixups):
343 for rev, a1, a2, b1, b2 in reversed(self.fixups):
339 blines = self.targetlines[b1:b2]
344 blines = self.targetlines[b1:b2]
340 if self.ui.debugflag:
345 if self.ui.debugflag:
341 idx = (max(rev - 1, 0)) // 2
346 idx = (max(rev - 1, 0)) // 2
342 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
347 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
343 % (node.short(self.fctxs[idx].node()),
348 % (node.short(self.fctxs[idx].node()),
344 a1, a2, len(blines)))
349 a1, a2, len(blines)))
345 self.linelog.replacelines(rev, a1, a2, b1, b2)
350 self.linelog.replacelines(rev, a1, a2, b1, b2)
346 if self.opts.get('edit_lines', False):
351 if self.opts.get('edit_lines', False):
347 self.finalcontents = self._checkoutlinelogwithedits()
352 self.finalcontents = self._checkoutlinelogwithedits()
348 else:
353 else:
349 self.finalcontents = self._checkoutlinelog()
354 self.finalcontents = self._checkoutlinelog()
350
355
351 def getfinalcontent(self, fctx):
356 def getfinalcontent(self, fctx):
352 """(fctx) -> str. get modified file content for a given filecontext"""
357 """(fctx) -> str. get modified file content for a given filecontext"""
353 idx = self.fctxs.index(fctx)
358 idx = self.fctxs.index(fctx)
354 return self.finalcontents[idx]
359 return self.finalcontents[idx]
355
360
    def _analysediffchunk(self, chunk, annotated):
        """analyse a different chunk and return new fixups found

        return [] if no lines from the chunk can be safely applied.

        the chunk (or lines) cannot be safely applied, if, for example:
          - the modified (deleted) lines belong to a public changeset
            (self.fctxs[0])
          - the chunk is a pure insertion and the adjacent lines (at most 2
            lines) belong to different non-public changesets, or do not belong
            to any non-public changesets.
          - the chunk is modifying lines from different changesets.
            in this case, if the number of lines deleted equals to the number
            of lines added, assume it's a simple 1:1 map (could be wrong).
            otherwise, give up.
          - the chunk is modifying lines from a single non-public changeset,
            but other revisions touch the area as well. i.e. the lines are
            not continuous as seen from the linelog.
        """
        a1, a2, b1, b2 = chunk
        # find involved indexes from annotate result
        involved = annotated[a1:a2]
        if not involved and annotated: # a1 == a2 and a is not empty
            # pure insertion, check nearby lines. ignore lines belong
            # to the public (first) changeset (i.e. annotated[i][0] == 1)
            nearbylinenums = {a2, max(0, a1 - 1)}
            involved = [annotated[i]
                        for i in nearbylinenums if annotated[i][0] != 1]
        involvedrevs = list(set(r for r, l in involved))
        newfixups = []
        if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
            # chunk belongs to a single revision
            rev = involvedrevs[0]
            # rev 1 is the immutable base (odd linelog revs are original
            # contents); only revs > 1 may be amended
            if rev > 1:
                # fixups are recorded against the even rev paired with
                # the original odd rev
                fixuprev = rev + 1
                newfixups.append((fixuprev, a1, a2, b1, b2))
        elif a2 - a1 == b2 - b1 or b1 == b2:
            # 1:1 line mapping, or chunk was deleted
            for i in pycompat.xrange(a1, a2):
                rev, linenum = annotated[i]
                if rev > 1:
                    if b1 == b2: # deletion, simply remove that single line
                        nb1 = nb2 = 0
                    else: # 1:1 line mapping, change the corresponding rev
                        nb1 = b1 + i - a1
                        nb2 = nb1 + 1
                    fixuprev = rev + 1
                    newfixups.append((fixuprev, i, i + 1, nb1, nb2))
        return self._optimizefixups(newfixups)
405
410
406 @staticmethod
411 @staticmethod
407 def _alldiffchunks(a, b, alines, blines):
412 def _alldiffchunks(a, b, alines, blines):
408 """like mdiff.allblocks, but only care about differences"""
413 """like mdiff.allblocks, but only care about differences"""
409 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
414 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
410 for chunk, btype in blocks:
415 for chunk, btype in blocks:
411 if btype != '!':
416 if btype != '!':
412 continue
417 continue
413 yield chunk
418 yield chunk
414
419
415 def _buildlinelog(self):
420 def _buildlinelog(self):
416 """calculate the initial linelog based on self.content{,line}s.
421 """calculate the initial linelog based on self.content{,line}s.
417 this is similar to running a partial "annotate".
422 this is similar to running a partial "annotate".
418 """
423 """
419 llog = linelog.linelog()
424 llog = linelog.linelog()
420 a, alines = '', []
425 a, alines = '', []
421 for i in pycompat.xrange(len(self.contents)):
426 for i in pycompat.xrange(len(self.contents)):
422 b, blines = self.contents[i], self.contentlines[i]
427 b, blines = self.contents[i], self.contentlines[i]
423 llrev = i * 2 + 1
428 llrev = i * 2 + 1
424 chunks = self._alldiffchunks(a, b, alines, blines)
429 chunks = self._alldiffchunks(a, b, alines, blines)
425 for a1, a2, b1, b2 in reversed(list(chunks)):
430 for a1, a2, b1, b2 in reversed(list(chunks)):
426 llog.replacelines(llrev, a1, a2, b1, b2)
431 llog.replacelines(llrev, a1, a2, b1, b2)
427 a, alines = b, blines
432 a, alines = b, blines
428 return llog
433 return llog
429
434
430 def _checkoutlinelog(self):
435 def _checkoutlinelog(self):
431 """() -> [str]. check out file contents from linelog"""
436 """() -> [str]. check out file contents from linelog"""
432 contents = []
437 contents = []
433 for i in pycompat.xrange(len(self.contents)):
438 for i in pycompat.xrange(len(self.contents)):
434 rev = (i + 1) * 2
439 rev = (i + 1) * 2
435 self.linelog.annotate(rev)
440 self.linelog.annotate(rev)
436 content = ''.join(map(self._getline, self.linelog.annotateresult))
441 content = ''.join(map(self._getline, self.linelog.annotateresult))
437 contents.append(content)
442 contents.append(content)
438 return contents
443 return contents
439
444
    def _checkoutlinelogwithedits(self):
        """() -> [str]. prompt all lines for edit

        presents every known line with a y/space matrix (one column per
        visible fctx) in the user's editor, then rebuilds per-revision
        contents from the edited matrix.
        """
        alllines = self.linelog.getalllines()
        # header
        editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
                        'exists in the changeset to the top\nHG:\n')
                      % self.fctxs[-1].path())
        # [(idx, fctx)]. hide the dummy emptyfilecontext
        visiblefctxs = [(i, f)
                        for i, f in enumerate(self.fctxs)
                        if not isinstance(f, emptyfilecontext)]
        for i, (j, f) in enumerate(visiblefctxs):
            editortext += (_('HG: %s/%s %s %s\n') %
                           ('|' * i, '-' * (len(visiblefctxs) - i + 1),
                            node.short(f.node()),
                            f.description().split('\n',1)[0]))
        editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
        # figure out the lifetime of a line, this is relatively inefficient,
        # but probably fine
        lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
        for i, f in visiblefctxs:
            self.linelog.annotate((i + 1) * 2)
            for l in self.linelog.annotateresult:
                lineset[l].add(i)
        # append lines
        for l in alllines:
            editortext += (' %s : %s' %
                           (''.join([('y' if i in lineset[l] else ' ')
                                     for i, _f in visiblefctxs]),
                            self._getline(l)))
        # run editor
        editedtext = self.ui.edit(editortext, '', action='absorb')
        if not editedtext:
            raise error.Abort(_('empty editor text'))
        # parse edited result
        contents = ['' for i in self.fctxs]
        # column layout must match the text emitted above: 4 columns of
        # left padding, one y/space column per visible fctx, then ' : '
        leftpadpos = 4
        colonpos = leftpadpos + len(visiblefctxs) + 1
        for l in mdiff.splitnewlines(editedtext):
            if l.startswith('HG:'):
                continue
            if l[colonpos - 1:colonpos + 2] != ' : ':
                raise error.Abort(_('malformed line: %s') % l)
            linecontent = l[colonpos + 2:]
            for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
                if ch == 'y':
                    contents[visiblefctxs[i][0]] += linecontent
        # chunkstats is hard to calculate if anything changes, therefore
        # set them to just a simple value (1, 1).
        if editedtext != editortext:
            self.chunkstats = [1, 1]
        return contents
492
497
493 def _getline(self, lineinfo):
498 def _getline(self, lineinfo):
494 """((rev, linenum)) -> str. convert rev+line number to line content"""
499 """((rev, linenum)) -> str. convert rev+line number to line content"""
495 rev, linenum = lineinfo
500 rev, linenum = lineinfo
496 if rev & 1: # odd: original line taken from fctxs
501 if rev & 1: # odd: original line taken from fctxs
497 return self.contentlines[rev // 2][linenum]
502 return self.contentlines[rev // 2][linenum]
498 else: # even: fixup line from targetfctx
503 else: # even: fixup line from targetfctx
499 return self.targetlines[linenum]
504 return self.targetlines[linenum]
500
505
501 def _iscontinuous(self, a1, a2, closedinterval=False):
506 def _iscontinuous(self, a1, a2, closedinterval=False):
502 """(a1, a2 : int) -> bool
507 """(a1, a2 : int) -> bool
503
508
504 check if these lines are continuous. i.e. no other insertions or
509 check if these lines are continuous. i.e. no other insertions or
505 deletions (from other revisions) among these lines.
510 deletions (from other revisions) among these lines.
506
511
507 closedinterval decides whether a2 should be included or not. i.e. is
512 closedinterval decides whether a2 should be included or not. i.e. is
508 it [a1, a2), or [a1, a2] ?
513 it [a1, a2), or [a1, a2] ?
509 """
514 """
510 if a1 >= a2:
515 if a1 >= a2:
511 return True
516 return True
512 llog = self.linelog
517 llog = self.linelog
513 offset1 = llog.getoffset(a1)
518 offset1 = llog.getoffset(a1)
514 offset2 = llog.getoffset(a2) + int(closedinterval)
519 offset2 = llog.getoffset(a2) + int(closedinterval)
515 linesinbetween = llog.getalllines(offset1, offset2)
520 linesinbetween = llog.getalllines(offset1, offset2)
516 return len(linesinbetween) == a2 - a1 + int(closedinterval)
521 return len(linesinbetween) == a2 - a1 + int(closedinterval)
517
522
518 def _optimizefixups(self, fixups):
523 def _optimizefixups(self, fixups):
519 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
524 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
520 merge adjacent fixups to make them less fragmented.
525 merge adjacent fixups to make them less fragmented.
521 """
526 """
522 result = []
527 result = []
523 pcurrentchunk = [[-1, -1, -1, -1, -1]]
528 pcurrentchunk = [[-1, -1, -1, -1, -1]]
524
529
525 def pushchunk():
530 def pushchunk():
526 if pcurrentchunk[0][0] != -1:
531 if pcurrentchunk[0][0] != -1:
527 result.append(tuple(pcurrentchunk[0]))
532 result.append(tuple(pcurrentchunk[0]))
528
533
529 for i, chunk in enumerate(fixups):
534 for i, chunk in enumerate(fixups):
530 rev, a1, a2, b1, b2 = chunk
535 rev, a1, a2, b1, b2 = chunk
531 lastrev = pcurrentchunk[0][0]
536 lastrev = pcurrentchunk[0][0]
532 lasta2 = pcurrentchunk[0][2]
537 lasta2 = pcurrentchunk[0][2]
533 lastb2 = pcurrentchunk[0][4]
538 lastb2 = pcurrentchunk[0][4]
534 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
539 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
535 self._iscontinuous(max(a1 - 1, 0), a1)):
540 self._iscontinuous(max(a1 - 1, 0), a1)):
536 # merge into currentchunk
541 # merge into currentchunk
537 pcurrentchunk[0][2] = a2
542 pcurrentchunk[0][2] = a2
538 pcurrentchunk[0][4] = b2
543 pcurrentchunk[0][4] = b2
539 else:
544 else:
540 pushchunk()
545 pushchunk()
541 pcurrentchunk[0] = list(chunk)
546 pcurrentchunk[0] = list(chunk)
542 pushchunk()
547 pushchunk()
543 return result
548 return result
544
549
545 def _showchanges(self, alines, blines, chunk, fixups):
550 def _showchanges(self, alines, blines, chunk, fixups):
546 ui = self.ui
551 ui = self.ui
547
552
548 def label(line, label):
553 def label(line, label):
549 if line.endswith('\n'):
554 if line.endswith('\n'):
550 line = line[:-1]
555 line = line[:-1]
551 return ui.label(line, label)
556 return ui.label(line, label)
552
557
553 # this is not optimized for perf but _showchanges only gets executed
558 # this is not optimized for perf but _showchanges only gets executed
554 # with an extra command-line flag.
559 # with an extra command-line flag.
555 a1, a2, b1, b2 = chunk
560 a1, a2, b1, b2 = chunk
556 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
561 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
557 for idx, fa1, fa2, fb1, fb2 in fixups:
562 for idx, fa1, fa2, fb1, fb2 in fixups:
558 for i in pycompat.xrange(fa1, fa2):
563 for i in pycompat.xrange(fa1, fa2):
559 aidxs[i - a1] = (max(idx, 1) - 1) // 2
564 aidxs[i - a1] = (max(idx, 1) - 1) // 2
560 for i in pycompat.xrange(fb1, fb2):
565 for i in pycompat.xrange(fb1, fb2):
561 bidxs[i - b1] = (max(idx, 1) - 1) // 2
566 bidxs[i - b1] = (max(idx, 1) - 1) // 2
562
567
563 buf = [] # [(idx, content)]
568 buf = [] # [(idx, content)]
564 buf.append((0, label('@@ -%d,%d +%d,%d @@'
569 buf.append((0, label('@@ -%d,%d +%d,%d @@'
565 % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
570 % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
566 buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
571 buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
567 for i in pycompat.xrange(a1, a2)]
572 for i in pycompat.xrange(a1, a2)]
568 buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
573 buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
569 for i in pycompat.xrange(b1, b2)]
574 for i in pycompat.xrange(b1, b2)]
570 for idx, line in buf:
575 for idx, line in buf:
571 shortnode = idx and node.short(self.fctxs[idx].node()) or ''
576 shortnode = idx and node.short(self.fctxs[idx].node()) or ''
572 ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
577 ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
573 line + '\n')
578 line + '\n')
574
579
class fixupstate(object):
    """state needed to run absorb

    internally, it keeps paths and filefixupstates.

    a typical use is like filefixupstates:

    1. call diffwith, to calculate fixups
    2. (optionally), present fixups to the user, or edit fixups
    3. call apply, to apply changes to memory
    4. call commit, to commit changes to hg database
    """

    def __init__(self, stack, ui=None, opts=None):
        """([ctx], ui or None) -> None

        stack: should be linear, and sorted by topo order - oldest first.
        all commits in stack are considered mutable.
        """
        assert stack
        self.ui = ui or nullui()
        self.opts = opts or {}
        self.stack = stack
        # NOTE(review): the unfiltered repo is used; presumably so replaced
        # nodes remain addressable during the rewrite - confirm
        self.repo = stack[-1].repo().unfiltered()

        # following fields will be filled later
        self.paths = [] # [str]
        self.status = None # ctx.status output
        self.fctxmap = {} # {path: {ctx: fctx}}
        self.fixupmap = {} # {path: filefixupstate}
        self.replacemap = {} # {oldnode: newnode or None}
        self.finalnode = None # head after all fixups

    def diffwith(self, targetctx, match=None, showchanges=False):
        """diff and prepare fixups. update self.fixupmap, self.paths"""
        # only care about modified files
        self.status = self.stack[-1].status(targetctx, match)
        self.paths = []
        # but if --edit-lines is used, the user may want to edit files
        # even if they are not modified
        editopt = self.opts.get('edit_lines')
        if not self.status.modified and editopt and match:
            interestingpaths = match.files()
        else:
            interestingpaths = self.status.modified
        # prepare the filefixupstate
        seenfctxs = set()
        # sorting is necessary to eliminate ambiguity for the "double move"
        # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
        for path in sorted(interestingpaths):
            self.ui.debug('calculating fixups for %s\n' % path)
            targetfctx = targetctx[path]
            fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
            # ignore symbolic links or binary, or unchanged files
            if any(f.islink() or stringutil.binary(f.data())
                   for f in [targetfctx] + fctxs
                   if not isinstance(f, emptyfilecontext)):
                continue
            # unchanged file: nothing to absorb (unless --edit-lines)
            if targetfctx.data() == fctxs[-1].data() and not editopt:
                continue
            seenfctxs.update(fctxs[1:])
            self.fctxmap[path] = ctx2fctx
            fstate = filefixupstate(fctxs, ui=self.ui, opts=self.opts)
            if showchanges:
                colorpath = self.ui.label(path, 'absorb.path')
                header = 'showing changes for ' + colorpath
                self.ui.write(header + '\n')
            fstate.diffwith(targetfctx, showchanges=showchanges)
            self.fixupmap[path] = fstate
            self.paths.append(path)

    def apply(self):
        """apply fixups to individual filefixupstates"""
        for path, state in self.fixupmap.iteritems():
            if self.ui.debugflag:
                self.ui.write(_('applying fixups to %s\n') % path)
            state.apply()

    @property
    def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
        return dict((path, state.chunkstats)
                    for path, state in self.fixupmap.iteritems())

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
        with self.repo.wlock(), self.repo.lock():
            with self.repo.transaction('absorb') as tr:
                self._commitstack()
                self._movebookmarks(tr)
                if self.repo['.'].node() in self.replacemap:
                    self._moveworkingdirectoryparent()
                if self._useobsolete:
                    self._obsoleteoldcommits()
            if not self._useobsolete: # strip must be outside transactions
                self._stripoldcommits()
        return self.finalnode

    def printchunkstats(self):
        """print things like '1 of 2 chunk(s) applied'"""
        ui = self.ui
        chunkstats = self.chunkstats
        if ui.verbose:
            # chunkstats for each file
            for path, stat in chunkstats.iteritems():
                if stat[0]:
                    ui.write(_('%s: %d of %d chunk(s) applied\n')
                             % (path, stat[0], stat[1]))
        elif not ui.quiet:
            # a summary for all files
            stats = chunkstats.values()
            applied, total = (sum(s[i] for s in stats) for i in (0, 1))
            ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))

    def _commitstack(self):
        """make new commits. update self.finalnode, self.replacemap.
        it is splitted from "commit" to avoid too much indentation.
        """
        # last node (20-char) committed by us
        lastcommitted = None
        # p1 which overrides the parent of the next commit, "None" means use
        # the original parent unchanged
        nextp1 = None
        for ctx in self.stack:
            memworkingcopy = self._getnewfilecontents(ctx)
            if not memworkingcopy and not lastcommitted:
                # nothing changed, nothing commited
                nextp1 = ctx
                continue
            msg = ''
            if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                # changeset is no longer necessary
                self.replacemap[ctx.node()] = None
                msg = _('became empty and was dropped')
            else:
                # changeset needs re-commit
                nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
                lastcommitted = self.repo[nodestr]
                nextp1 = lastcommitted
                self.replacemap[ctx.node()] = lastcommitted.node()
                if memworkingcopy:
                    msg = _('%d file(s) changed, became %s') % (
                        len(memworkingcopy), self._ctx2str(lastcommitted))
                else:
                    msg = _('became %s') % self._ctx2str(lastcommitted)
            if self.ui.verbose and msg:
                self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
        self.finalnode = lastcommitted and lastcommitted.node()

    def _ctx2str(self, ctx):
        """(ctx) -> str. full hex when debugging, short node otherwise"""
        if self.ui.debugflag:
            return ctx.hex()
        else:
            return node.short(ctx.node())

    def _getnewfilecontents(self, ctx):
        """(ctx) -> {path: str}

        fetch file contents from filefixupstates.
        return the working copy overrides - files different from ctx.
        """
        result = {}
        for path in self.paths:
            ctx2fctx = self.fctxmap[path] # {ctx: fctx}
            if ctx not in ctx2fctx:
                continue
            fctx = ctx2fctx[ctx]
            content = fctx.data()
            newcontent = self.fixupmap[path].getfinalcontent(fctx)
            if content != newcontent:
                result[fctx.path()] = newcontent
        return result

    def _movebookmarks(self, tr):
        """move or delete bookmarks pointing at replaced changesets"""
        repo = self.repo
        needupdate = [(name, self.replacemap[hsh])
                      for name, hsh in repo._bookmarks.iteritems()
                      if hsh in self.replacemap]
        changes = []
        for name, hsh in needupdate:
            if hsh:
                changes.append((name, hsh))
                if self.ui.verbose:
                    self.ui.write(_('moving bookmark %s to %s\n')
                                  % (name, node.hex(hsh)))
            else:
                # replacement is None: the changeset was dropped entirely
                changes.append((name, None))
                if self.ui.verbose:
                    self.ui.write(_('deleting bookmark %s\n') % name)
        repo._bookmarks.applychanges(repo, tr, changes)

    def _moveworkingdirectoryparent(self):
        """re-point the dirstate parent when '.' was replaced or dropped"""
        if not self.finalnode:
            # Find the latest not-{obsoleted,stripped} parent.
            revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
            ctx = self.repo[revs.first()]
            self.finalnode = ctx.node()
        else:
            ctx = self.repo[self.finalnode]

        dirstate = self.repo.dirstate
        # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
        # be slow. in absorb's case, no need to invalidate fsmonitorstate.
        noop = lambda: 0
        restore = noop
        if util.safehasattr(dirstate, '_fsmonitorstate'):
            # temporarily disable fsmonitor invalidation; restored in finally
            bak = dirstate._fsmonitorstate.invalidate
            def restore():
                dirstate._fsmonitorstate.invalidate = bak
            dirstate._fsmonitorstate.invalidate = noop
        try:
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
        finally:
            restore()

    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            # merges are never considered noop here
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy.iterkeys())):
            return False
        for path, content in memworkingcopy.iteritems():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True

    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """(ctx, {path: content}, node) -> node. make a single commit

        the commit is a clone from ctx, with a (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'addnoise'):
            # noise avoids creating an obsolescence cycle when a rewritten
            # changeset ends up identical to an obsoleted predecessor
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        # preserve phase
        with mctx.repo().ui.configoverride({
            ('phases', 'new-commit'): ctx.phase()}):
            return mctx.commit()

    @util.propertycache
    def _useobsolete(self):
        """() -> bool. whether to obsolete (vs strip) replaced changesets"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _obsoleteoldcommits(self):
        """write obsolescence markers old -> new (or old -> () for drops)"""
        relations = [(self.repo[k], v and (self.repo[v],) or ())
                     for k, v in self.replacemap.iteritems()]
        if relations:
            obsolete.createmarkers(self.repo, relations)

    def _stripoldcommits(self):
        """strip replaced changesets (fallback when obsmarkers are off)"""
        nodelist = self.replacemap.keys()
        # make sure we don't strip innocent children
        revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
                              nodelist, nodelist)
        tonode = self.repo.changelog.node
        nodelist = [tonode(r) for r in revs]
        if nodelist:
            repair.strip(self.repo.ui, self.repo, nodelist)
def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))

    extract absorb-friendly data from a record hunk: the file path, the
    [a1, a2) range of old-file lines it replaces, and the replacement
    lines. returns (None, None) for unsupported hunk types.
    """
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    # presumably fromline is 1-based; the -1 converts to a 0-based index
    # after skipping the leading context - TODO confirm
    a1 = hunk.fromline + len(hunk.before) - 1
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    # patchlines[0] is the "@@" header; keep non-deleted lines, stripping
    # the one-character diff marker
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)
def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    # group the parsed chunks by path: {path: [(a1, a2, [bline])]}
    patchmap = defaultdict(lambda: [])
    for chunk in chunks:
        path, info = _parsechunk(chunk)
        if path and info:
            patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        # apply bottom-up so earlier line numbers stay valid while splicing
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)
def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'maxstacksize')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        # let the user pick hunks; the selected hunks are overlaid on top
        # of stack[-1] to form the effective diff target
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    state.diffwith(targetctx, matcher, showchanges=opts.get('print_changes'))
    if not opts.get('dry_run'):
        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state
@command('^absorb|sf',
         [('p', 'print-changes', None,
           _('print which changesets are modified by which changes')),
          ('i', 'interactive', None,
           _('interactively select which chunks to apply (EXPERIMENTAL)')),
          ('e', 'edit-lines', None,
           _('edit what lines belong to which changesets before commit '
             '(EXPERIMENTAL)')),
         ] + commands.dryrunopts + commands.walkopts,
         _('hg absorb [OPTION] [FILE]...'))
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. They can be
    observed by :hg:`status` or :hg:`diff` afterwards. In other words,
    absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    If in doubt, run :hg:`absorb -pn` to preview what changesets will
    be amended by what changed lines, without actually changing anything.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    state = absorb(ui, repo, pats=pats, opts=opts)
    # chunkstats maps path -> (adopted, total); fail when nothing was adopted
    napplied = sum(adopted for adopted, _total in state.chunkstats.values())
    if napplied == 0:
        return 1
974
979
def _wrapamend(flag):
    """add flag to amend, which will be a shortcut to the absorb command"""
    # nothing to wire up when the shortcut flag is not configured
    if not flag:
        return
    wrapper = extensions.bind(_amendcmd, flag)
    # the amend command can exist in evolve, or fbamend; None means core
    for extname in ['evolve', 'fbamend', None]:
        if extname is None:
            cmdtable = commands.table
        else:
            try:
                cmdtable = extensions.find(extname).cmdtable
            except (KeyError, AttributeError):
                # extension not loaded, or it exposes no command table
                continue
        try:
            entry = extensions.wrapcommand(cmdtable, 'amend', wrapper)
        except error.UnknownCommand:
            # this table has no amend command; try the next candidate
            continue
        flaghelp = _('incorporate corrections into stack. '
                     'see \'hg help absorb\' for details')
        # entry[1] is the option list of the wrapped command
        entry[1].append(('', flag, None, flaghelp))
        return
999
1004
def _amendcmd(flag, orig, ui, repo, *pats, **opts):
    # without the shortcut flag, fall through to the wrapped amend
    if not opts.get(flag):
        return orig(ui, repo, *pats, **opts)
    # use absorb: reject any other flag we cannot honor
    for name, value in opts.iteritems():
        if value and name not in ['interactive', flag]:
            raise error.Abort(_('--%s does not support --%s')
                              % (flag, name.replace('_', '-')))
    state = absorb(ui, repo, pats=pats, opts=opts)
    # different from the original absorb, tell users what chunks were
    # ignored and were left. it's because users usually expect "amend" to
    # take all of their changes and will feel strange otherwise.
    # the original "absorb" command faces more-advanced users knowing
    # what's going on and is less verbose.
    nadopted = 0
    notes = []  # [(status symbol, status word, path, reason)]
    for path, (adopted, total) in state.chunkstats.iteritems():
        nadopted += adopted
        if adopted != total:
            why = _('%d modified chunks were ignored') % (total - adopted)
            notes.append(('M', 'modified', path, why))
    # files reported by status but never touched by absorb
    statustable = [('modified', 'M'), ('added', 'A'),
                   ('removed', 'R'), ('deleted', '!')]
    for idx, (word, symbol) in enumerate(statustable):
        untouched = set(state.status[idx]) - set(state.paths)
        for path in sorted(untouched):
            if word == 'modified':
                why = _('unsupported file type (ex. binary or link)')
            else:
                why = _('%s files were ignored') % word
            notes.append((symbol, word, path, why))
    if notes:
        ui.write(_('\n# changes not applied and left in '
                   'working directory:\n'))
        for symbol, word, path, why in notes:
            ui.write(_('# %s %s : %s\n') % (
                ui.label(symbol, 'status.' + word),
                ui.label(path, 'status.' + word), why))

    if nadopted == 0:
        return 1
1041
1046
def extsetup(ui):
    """extension setup: optionally expose absorb as an amend flag"""
    amendflag = ui.config('absorb', 'amendflag')
    _wrapamend(amendflag)
General Comments 0
You need to be logged in to leave comments. Login now