##// END OF EJS Templates
effectflag: detect when diff changed...
Boris Feld -
r34422:187bc224 default
parent child Browse files
Show More
@@ -1,725 +1,771
1 # obsutil.py - utility functions for obsolescence
1 # obsutil.py - utility functions for obsolescence
2 #
2 #
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11
11
12 from . import (
12 from . import (
13 phases,
13 phases,
14 util
14 util
15 )
15 )
16
16
class marker(object):
    """Lightweight wrapper around a raw obsolescence-marker tuple."""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def __hash__(self):
        return hash(self._data)

    def __eq__(self, other):
        # equal only to another wrapper of the same concrete type holding
        # equal raw data
        return type(self) == type(other) and self._data == other._data

    def precnode(self):
        """Deprecated spelling of prednode() (kept for compatibility)."""
        msg = ("'marker.precnode' is deprecated, "
               "use 'marker.prednode'")
        util.nouideprecwarn(msg, '4.4')
        return self.prednode()

    def prednode(self):
        """Return the node identifier of the predecessor changeset."""
        return self._data[0]

    def succnodes(self):
        """Return the successor changesets node identifiers."""
        return self._data[1]

    def parentnodes(self):
        """Return the parents of the predecessor (None if not recorded)."""
        return self._data[5]

    def metadata(self):
        """Return the marker metadata as a decoded dictionary."""
        return dict(self._data[3])

    def date(self):
        """Return the marker creation date as (unixtime, offset)."""
        return self._data[4]

    def flags(self):
        """Return the raw flags field of the marker."""
        return self._data[2]
63
63
def getmarkers(repo, nodes=None, exclusive=False):
    """Yield every marker known in a repository, wrapped in marker objects.

    If <nodes> is specified, only markers "relevant" to those nodes are
    returned; with exclusive=True, only markers exclusive to them.
    """
    if nodes is None:
        rawmarkers = repo.obsstore
    else:
        if exclusive:
            rawmarkers = exclusivemarkers(repo, nodes)
        else:
            rawmarkers = repo.obsstore.relevantmarkers(nodes)

    for rawdata in rawmarkers:
        yield marker(repo, rawdata)
78
78
def closestpredecessors(repo, nodeid):
    """Yield the closest predecessors of <nodeid> that are visible changesets.

    This respects the repoview filtering: a filtered revision counts as
    missing and its own predecessors are walked through instead.
    """

    predecessors = repo.obsstore.predecessors
    pending = [nodeid]
    visited = set(pending)

    while pending:
        node = pending.pop()

        for mark in predecessors.get(node, ()):
            candidate = mark[0]

            # basic protection against obsolescence cycles
            if candidate in visited:
                continue
            visited.add(candidate)

            if candidate in repo:
                yield candidate
            else:
                pending.append(candidate)
106
106
def allprecursors(*args, **kwargs):
    """ (DEPRECATED) renamed to allpredecessors
    """
    # deprecation shim: emit a warning, then delegate unchanged to the
    # renamed implementation.
    msg = ("'obsutil.allprecursors' is deprecated, "
           "use 'obsutil.allpredecessors'")
    util.nouideprecwarn(msg, '4.4')

    return allpredecessors(*args, **kwargs)
115
115
def allpredecessors(obsstore, nodes, ignoreflags=0):
    """Yield the node of every predecessor of <nodes>, including <nodes>.

    Some predecessors may be unknown locally.

    This is a linear walk, unsuited to detecting folded changesets. The
    initial nodes themselves are yielded too.
    """

    pending = set(nodes)
    emitted = set(pending)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.predecessors.get(node, ()):
            # skip markers carrying any of the ignored flags
            if mark[2] & ignoreflags:
                continue
            prednode = mark[0]
            if prednode not in emitted:
                emitted.add(prednode)
                pending.add(prednode)
137
137
def allsuccessors(obsstore, nodes, ignoreflags=0):
    """Yield the node of every successor of <nodes>, including <nodes>.

    Some successors may be unknown locally.

    This is a linear walk, unsuited to detecting split changesets. The
    initial nodes themselves are yielded too.
    """
    pending = set(nodes)
    emitted = set(pending)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.successors.get(node, ()):
            # skip markers carrying any of the ignored flags
            if mark[2] & ignoreflags:
                continue
            for succnode in mark[1]:
                if succnode not in emitted:
                    emitted.add(succnode)
                    pending.add(succnode)
158
158
159 def _filterprunes(markers):
159 def _filterprunes(markers):
160 """return a set with no prune markers"""
160 """return a set with no prune markers"""
161 return set(m for m in markers if m[1])
161 return set(m for m in markers if m[1])
162
162
def exclusivemarkers(repo, nodes):
    """set of markers relevant to "nodes" but no other locally-known nodes

    This function computes the set of markers "exclusive" to a locally-known
    node. This means we walk the markers starting from <nodes> until we reach a
    locally-known precursor outside of <nodes>. Elements of <nodes> with
    locally-known successors outside of <nodes> are ignored (since their
    precursors markers are also relevant to these successors).

    For example:

        # (A0 rewritten as A1)
        #
        # A0 <-1- A1 # Marker "1" is exclusive to A1

    or

        # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
        #
        # <-1- A0 <-2- AX <-3- A1 # Markers "2,3" are exclusive to A1

    or

        # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
        #
        #          <-2- A1 # Marker "2" is exclusive to A0,A1
        #        /
        # <-1- A0
        #        \
        #          <-3- A2 # Marker "3" is exclusive to A0,A2
        #
        # in addition:
        #
        #  Markers "2,3" are exclusive to A1,A2
        #  Markers "1,2,3" are exclusive to A0,A1,A2

    See test/test-obsolete-bundle-strip.t for more examples.

    An example usage is strip. When stripping a changeset, we also want to
    strip the markers exclusive to this changeset. Otherwise we would have
    "dangling" obsolescence markers from its precursors: obsolescence markers
    marking a node as obsolete without any successors available locally.

    As for relevant markers, the prune markers for children will be followed.
    Of course, they will only be followed if the pruned children are
    locally-known, since the prune markers are relevant to the pruned node.
    However, while prune markers are considered relevant to the parent of the
    pruned changesets, prune markers for locally-known changesets (with no
    successors) are considered exclusive to the pruned nodes. This allows
    to strip the prune markers (with the rest of the exclusive chain) alongside
    the pruned changesets.
    """
    # running on a filtered repository would be dangerous as markers could be
    # reported as exclusive when they are relevant for other filtered nodes.
    unfi = repo.unfiltered()

    # shortcut to various useful items
    nm = unfi.changelog.nodemap
    precursorsmarkers = unfi.obsstore.predecessors
    successormarkers = unfi.obsstore.successors
    childrenmarkers = unfi.obsstore.children

    # exclusive markers (return of the function)
    exclmarkers = set()
    # we need fast membership testing
    nodes = set(nodes)
    # looking for heads in the obshistory
    #
    # XXX we are ignoring all issues in regard with cycle for now.
    stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
    stack.sort()
    # nodes already stacked
    seennodes = set(stack)
    while stack:
        current = stack.pop()
        # fetch precursors markers
        markers = list(precursorsmarkers.get(current, ()))
        # extend the list with prune markers
        for mark in successormarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # and markers from children (looking for prune)
        for mark in childrenmarkers.get(current, ()):
            if not mark[1]:
                markers.append(mark)
        # traverse the markers
        for mark in markers:
            if mark in exclmarkers:
                # markers already selected
                continue

            # If the marker is about the current node, select it
            #
            # (this delays the addition of markers from children)
            if mark[1] or mark[0] == current:
                exclmarkers.add(mark)

            # should we keep traversing through the precursors?
            prec = mark[0]

            # nodes in the stack or already processed
            if prec in seennodes:
                continue

            # is this a locally known node ?
            known = prec in nm
            # if locally-known and not in the <nodes> set the traversal
            # stop here.
            if known and prec not in nodes:
                continue

            # do not keep going if there are unselected markers pointing to
            # this node. If we end up traversing these unselected markers
            # later the node will be taken care of at that point.
            #
            # NOTE(review): no default is passed to .get() here — this relies
            # on <prec> having an entry in successormarkers, which holds
            # because prec was taken from a marker's predecessor field.
            precmarkers = _filterprunes(successormarkers.get(prec))
            if precmarkers.issubset(exclmarkers):
                seennodes.add(prec)
                stack.append(prec)

    return exclmarkers
283
283
def foreground(repo, nodes):
    """Return all nodes in the "foreground" of the given nodes.

    The foreground of a revision is anything reachable using the
    parent -> children or precursor -> successor relations. It is very
    similar to "descendants" but augmented with obsolescence information.

    Beware that possible obsolescence cycles may give surprising results in
    complex situations.
    """
    repo = repo.unfiltered()
    foreground = set(repo.set('%ln::', nodes))
    if not repo.obsstore:
        return set(c.node() for c in foreground)
    # Obsolescence data is present: alternate between descendant expansion
    # and successor expansion until a fixed point is reached.
    # XXX will probably deserve an optimised revset.
    nm = repo.changelog.nodemap
    previoussize = -1
    while len(foreground) != previoussize:
        previoussize = len(foreground)
        candidates = set(c.node() for c in foreground)
        mutable = [c.node() for c in foreground if c.mutable()]
        candidates.update(allsuccessors(repo.obsstore, mutable))
        known = (n for n in candidates if n in nm)
        foreground = set(repo.set('%ln::', known))
    return set(c.node() for c in foreground)
309
309
# logic around storing and using effect flags
#
# The "effect flag" is a bitfield stored in marker metadata (field "ef1")
# recording which aspects of a changeset a rewrite operation modified.
EFFECTFLAGFIELD = "ef1"

DESCCHANGED = 1 << 0 # action changed the description
METACHANGED = 1 << 1 # action changed the meta
DIFFCHANGED = 1 << 3 # action changed the diff introduced by the changeset
PARENTCHANGED = 1 << 2 # action changed the parent
USERCHANGED = 1 << 4 # the user changed
DATECHANGED = 1 << 5 # the date changed
BRANCHCHANGED = 1 << 6 # the branch changed
319
320
# extra keys that must not be taken into account when comparing changeset
# metadata (they are managed by the rewrite operations themselves)
METABLACKLIST = [
    re.compile('^branch$'),
    re.compile('^.*-source$'),
    re.compile('^.*_source$'),
    re.compile('^source$'),
]

def metanotblacklisted(metaitem):
    """Return True when the key of a meta item (extrakey, extravalue)
    matches none of the blacklist patterns."""
    extrakey = metaitem[0]

    for pattern in METABLACKLIST:
        if pattern.match(extrakey):
            return False
    return True
334
335
336 def _prepare_hunk(hunk):
337 """Drop all information but the username and patch"""
338 cleanhunk = []
339 for line in hunk.splitlines():
340 if line.startswith(b'# User') or not line.startswith(b'#'):
341 if line.startswith(b'@@'):
342 line = b'@@\n'
343 cleanhunk.append(line)
344 return cleanhunk
345
346 def _getdifflines(iterdiff):
347 """return a cleaned up lines"""
348 lines = next(iterdiff, None)
349
350 if lines is None:
351 return lines
352
353 return _prepare_hunk(lines)
354
def _cmpdiff(leftctx, rightctx):
    """Return True if both contexts introduce the "same diff".

    This is a first and basic implementation, with many shortcomings.
    """

    # Either context might be hidden/filtered, so resolve both against an
    # unfiltered repository before computing the diff.
    leftunfi = leftctx._repo.unfiltered()[leftctx.rev()]
    leftdiff = leftunfi.diff(git=1)
    rightunfi = rightctx._repo.unfiltered()[rightctx.rev()]
    rightdiff = rightunfi.diff(git=1)

    # Compare the two diffs hunk by hunk until they diverge or both end.
    while True:
        lefthunk = _getdifflines(leftdiff)
        righthunk = _getdifflines(rightdiff)
        if lefthunk != righthunk:
            return False
        if lefthunk is None:
            # both iterators exhausted simultaneously: diffs are identical
            return True
376
def geteffectflag(relation):
    """ From an obs-marker relation, compute what changed between the
    predecessor and the successor.

    <relation> is a (source, successors) pair of changectx-like objects;
    the return value is a bitfield of the *CHANGED effect flags.
    """
    effects = 0

    source = relation[0]

    for changectx in relation[1]:
        # Check if description has changed
        if changectx.description() != source.description():
            effects |= DESCCHANGED

        # Check if user has changed
        if changectx.user() != source.user():
            effects |= USERCHANGED

        # Check if date has changed
        if changectx.date() != source.date():
            effects |= DATECHANGED

        # Check if branch has changed
        if changectx.branch() != source.branch():
            effects |= BRANCHCHANGED

        # Check if at least one of the parents has changed
        if changectx.parents() != source.parents():
            effects |= PARENTCHANGED

        # Check if other meta has changed.
        #
        # Materialize the filtered metadata as lists: under Python 3,
        # filter() returns a lazy iterator and comparing two distinct
        # iterators with != is always True, which would wrongly flag
        # METACHANGED on every rewrite. Behavior is unchanged on Python 2
        # (filter() already returned a list there).
        changeextra = changectx.extra().items()
        ctxmeta = [m for m in changeextra if metanotblacklisted(m)]

        sourceextra = source.extra().items()
        srcmeta = [m for m in sourceextra if metanotblacklisted(m)]

        if ctxmeta != srcmeta:
            effects |= METACHANGED

        # Check if the diff has changed
        if not _cmpdiff(source, changectx):
            effects |= DIFFCHANGED

    return effects
375
421
def getobsoleted(repo, tr):
    """Return the set of pre-existing revisions obsoleted by a transaction.

    Only revisions that existed before the transaction, are not public, and
    whose successor markers were all added by the transaction are reported.
    """
    torev = repo.unfiltered().changelog.nodemap.get
    phase = repo._phasecache.phase
    succsmarkers = repo.obsstore.successors.get
    public = phases.public
    addedmarkers = tr.changes.get('obsmarkers')
    addedrevs = tr.changes.get('revs')
    seenrevs = set(addedrevs)
    obsoleted = set()
    for mark in addedmarkers:
        rev = torev(mark[0])
        # unknown node, or a revision created (or already handled) in this
        # very transaction: not "pre-existing", skip it
        if rev is None or rev in seenrevs:
            continue
        seenrevs.add(rev)
        # public changesets cannot become obsolete
        if phase(repo, rev) == public:
            continue
        # only report the node when every marker obsoleting it comes from
        # this transaction
        if set(succsmarkers(mark[0]) or []).issubset(addedmarkers):
            obsoleted.add(rev)
    return obsoleted
397
443
class _succs(list):
    """A list of successors carrying, as metadata, the set of markers that
    produced it."""

    def __init__(self, *args, **kwargs):
        super(_succs, self).__init__(*args, **kwargs)
        self.markers = set()

    def copy(self):
        """Return an independent copy (both list content and marker set)."""
        duplicate = _succs(self)
        duplicate.markers = set(self.markers)
        return duplicate

    @util.propertycache
    def _set(self):
        # set view of the list content, cached after first access; treat as
        # immutable once computed
        return set(self)

    def canmerge(self, other):
        """Return True when this successors set is contained in `other`."""
        return self._set <= other._set
417
463
418 def successorssets(repo, initialnode, closest=False, cache=None):
464 def successorssets(repo, initialnode, closest=False, cache=None):
419 """Return set of all latest successors of initial nodes
465 """Return set of all latest successors of initial nodes
420
466
421 The successors set of a changeset A are the group of revisions that succeed
467 The successors set of a changeset A are the group of revisions that succeed
422 A. It succeeds A as a consistent whole, each revision being only a partial
468 A. It succeeds A as a consistent whole, each revision being only a partial
423 replacement. By default, the successors set contains non-obsolete
469 replacement. By default, the successors set contains non-obsolete
424 changesets only, walking the obsolescence graph until reaching a leaf. If
470 changesets only, walking the obsolescence graph until reaching a leaf. If
425 'closest' is set to True, closest successors-sets are return (the
471 'closest' is set to True, closest successors-sets are return (the
426 obsolescence walk stops on known changesets).
472 obsolescence walk stops on known changesets).
427
473
428 This function returns the full list of successor sets which is why it
474 This function returns the full list of successor sets which is why it
429 returns a list of tuples and not just a single tuple. Each tuple is a valid
475 returns a list of tuples and not just a single tuple. Each tuple is a valid
430 successors set. Note that (A,) may be a valid successors set for changeset A
476 successors set. Note that (A,) may be a valid successors set for changeset A
431 (see below).
477 (see below).
432
478
433 In most cases, a changeset A will have a single element (e.g. the changeset
479 In most cases, a changeset A will have a single element (e.g. the changeset
434 A is replaced by A') in its successors set. Though, it is also common for a
480 A is replaced by A') in its successors set. Though, it is also common for a
435 changeset A to have no elements in its successor set (e.g. the changeset
481 changeset A to have no elements in its successor set (e.g. the changeset
436 has been pruned). Therefore, the returned list of successors sets will be
482 has been pruned). Therefore, the returned list of successors sets will be
437 [(A',)] or [], respectively.
483 [(A',)] or [], respectively.
438
484
439 When a changeset A is split into A' and B', however, it will result in a
485 When a changeset A is split into A' and B', however, it will result in a
440 successors set containing more than a single element, i.e. [(A',B')].
486 successors set containing more than a single element, i.e. [(A',B')].
441 Divergent changesets will result in multiple successors sets, i.e. [(A',),
487 Divergent changesets will result in multiple successors sets, i.e. [(A',),
442 (A'')].
488 (A'')].
443
489
444 If a changeset A is not obsolete, then it will conceptually have no
490 If a changeset A is not obsolete, then it will conceptually have no
445 successors set. To distinguish this from a pruned changeset, the successor
491 successors set. To distinguish this from a pruned changeset, the successor
446 set will contain itself only, i.e. [(A,)].
492 set will contain itself only, i.e. [(A,)].
447
493
448 Finally, final successors unknown locally are considered to be pruned
494 Finally, final successors unknown locally are considered to be pruned
449 (pruned: obsoleted without any successors). (Final: successors not affected
495 (pruned: obsoleted without any successors). (Final: successors not affected
450 by markers).
496 by markers).
451
497
452 The 'closest' mode respect the repoview filtering. For example, without
498 The 'closest' mode respect the repoview filtering. For example, without
453 filter it will stop at the first locally known changeset, with 'visible'
499 filter it will stop at the first locally known changeset, with 'visible'
454 filter it will stop on visible changesets).
500 filter it will stop on visible changesets).
455
501
456 The optional `cache` parameter is a dictionary that may contains
502 The optional `cache` parameter is a dictionary that may contains
457 precomputed successors sets. It is meant to reuse the computation of a
503 precomputed successors sets. It is meant to reuse the computation of a
458 previous call to `successorssets` when multiple calls are made at the same
504 previous call to `successorssets` when multiple calls are made at the same
459 time. The cache dictionary is updated in place. The caller is responsible
505 time. The cache dictionary is updated in place. The caller is responsible
460 for its life span. Code that makes multiple calls to `successorssets`
506 for its life span. Code that makes multiple calls to `successorssets`
461 *should* use this cache mechanism or risk a performance hit.
507 *should* use this cache mechanism or risk a performance hit.
462
508
463 Since results are different depending of the 'closest' most, the same cache
509 Since results are different depending of the 'closest' most, the same cache
464 cannot be reused for both mode.
510 cannot be reused for both mode.
465 """
511 """
466
512
467 succmarkers = repo.obsstore.successors
513 succmarkers = repo.obsstore.successors
468
514
469 # Stack of nodes we search successors sets for
515 # Stack of nodes we search successors sets for
470 toproceed = [initialnode]
516 toproceed = [initialnode]
471 # set version of above list for fast loop detection
517 # set version of above list for fast loop detection
472 # element added to "toproceed" must be added here
518 # element added to "toproceed" must be added here
473 stackedset = set(toproceed)
519 stackedset = set(toproceed)
474 if cache is None:
520 if cache is None:
475 cache = {}
521 cache = {}
476
522
477 # This while loop is the flattened version of a recursive search for
523 # This while loop is the flattened version of a recursive search for
478 # successors sets
524 # successors sets
479 #
525 #
480 # def successorssets(x):
526 # def successorssets(x):
481 # successors = directsuccessors(x)
527 # successors = directsuccessors(x)
482 # ss = [[]]
528 # ss = [[]]
483 # for succ in directsuccessors(x):
529 # for succ in directsuccessors(x):
484 # # product as in itertools cartesian product
530 # # product as in itertools cartesian product
485 # ss = product(ss, successorssets(succ))
531 # ss = product(ss, successorssets(succ))
486 # return ss
532 # return ss
487 #
533 #
488 # But we can not use plain recursive calls here:
534 # But we can not use plain recursive calls here:
489 # - that would blow the python call stack
535 # - that would blow the python call stack
490 # - obsolescence markers may have cycles, we need to handle them.
536 # - obsolescence markers may have cycles, we need to handle them.
491 #
537 #
492 # The `toproceed` list act as our call stack. Every node we search
538 # The `toproceed` list act as our call stack. Every node we search
493 # successors set for are stacked there.
539 # successors set for are stacked there.
494 #
540 #
495 # The `stackedset` is set version of this stack used to check if a node is
541 # The `stackedset` is set version of this stack used to check if a node is
496 # already stacked. This check is used to detect cycles and prevent infinite
542 # already stacked. This check is used to detect cycles and prevent infinite
497 # loop.
543 # loop.
498 #
544 #
499 # successors set of all nodes are stored in the `cache` dictionary.
545 # successors set of all nodes are stored in the `cache` dictionary.
500 #
546 #
501 # After this while loop ends we use the cache to return the successors sets
547 # After this while loop ends we use the cache to return the successors sets
502 # for the node requested by the caller.
548 # for the node requested by the caller.
503 while toproceed:
549 while toproceed:
504 # Every iteration tries to compute the successors sets of the topmost
550 # Every iteration tries to compute the successors sets of the topmost
505 # node of the stack: CURRENT.
551 # node of the stack: CURRENT.
506 #
552 #
507 # There are four possible outcomes:
553 # There are four possible outcomes:
508 #
554 #
509 # 1) We already know the successors sets of CURRENT:
555 # 1) We already know the successors sets of CURRENT:
510 # -> mission accomplished, pop it from the stack.
556 # -> mission accomplished, pop it from the stack.
511 # 2) Stop the walk:
557 # 2) Stop the walk:
512 # default case: Node is not obsolete
558 # default case: Node is not obsolete
513 # closest case: Node is known at this repo filter level
559 # closest case: Node is known at this repo filter level
514 # -> the node is its own successors sets. Add it to the cache.
560 # -> the node is its own successors sets. Add it to the cache.
515 # 3) We do not know successors set of direct successors of CURRENT:
561 # 3) We do not know successors set of direct successors of CURRENT:
516 # -> We add those successors to the stack.
562 # -> We add those successors to the stack.
517 # 4) We know successors sets of all direct successors of CURRENT:
563 # 4) We know successors sets of all direct successors of CURRENT:
518 # -> We can compute CURRENT successors set and add it to the
564 # -> We can compute CURRENT successors set and add it to the
519 # cache.
565 # cache.
520 #
566 #
521 current = toproceed[-1]
567 current = toproceed[-1]
522
568
523 # case 2 condition is a bit hairy because of closest,
569 # case 2 condition is a bit hairy because of closest,
524 # we compute it on its own
570 # we compute it on its own
525 case2condition = ((current not in succmarkers)
571 case2condition = ((current not in succmarkers)
526 or (closest and current != initialnode
572 or (closest and current != initialnode
527 and current in repo))
573 and current in repo))
528
574
529 if current in cache:
575 if current in cache:
530 # case (1): We already know the successors sets
576 # case (1): We already know the successors sets
531 stackedset.remove(toproceed.pop())
577 stackedset.remove(toproceed.pop())
532 elif case2condition:
578 elif case2condition:
533 # case (2): end of walk.
579 # case (2): end of walk.
534 if current in repo:
580 if current in repo:
535 # We have a valid successors.
581 # We have a valid successors.
536 cache[current] = [_succs((current,))]
582 cache[current] = [_succs((current,))]
537 else:
583 else:
538 # Final obsolete version is unknown locally.
584 # Final obsolete version is unknown locally.
539 # Do not count that as a valid successors
585 # Do not count that as a valid successors
540 cache[current] = []
586 cache[current] = []
541 else:
587 else:
542 # cases (3) and (4)
588 # cases (3) and (4)
543 #
589 #
544 # We proceed in two phases. Phase 1 aims to distinguish case (3)
590 # We proceed in two phases. Phase 1 aims to distinguish case (3)
545 # from case (4):
591 # from case (4):
546 #
592 #
547 # For each direct successors of CURRENT, we check whether its
593 # For each direct successors of CURRENT, we check whether its
548 # successors sets are known. If they are not, we stack the
594 # successors sets are known. If they are not, we stack the
549 # unknown node and proceed to the next iteration of the while
595 # unknown node and proceed to the next iteration of the while
550 # loop. (case 3)
596 # loop. (case 3)
551 #
597 #
552 # During this step, we may detect obsolescence cycles: a node
598 # During this step, we may detect obsolescence cycles: a node
553 # with unknown successors sets but already in the call stack.
599 # with unknown successors sets but already in the call stack.
554 # In such a situation, we arbitrary set the successors sets of
600 # In such a situation, we arbitrary set the successors sets of
555 # the node to nothing (node pruned) to break the cycle.
601 # the node to nothing (node pruned) to break the cycle.
556 #
602 #
557 # If no break was encountered we proceed to phase 2.
603 # If no break was encountered we proceed to phase 2.
558 #
604 #
559 # Phase 2 computes successors sets of CURRENT (case 4); see details
605 # Phase 2 computes successors sets of CURRENT (case 4); see details
560 # in phase 2 itself.
606 # in phase 2 itself.
561 #
607 #
562 # Note the two levels of iteration in each phase.
608 # Note the two levels of iteration in each phase.
563 # - The first one handles obsolescence markers using CURRENT as
609 # - The first one handles obsolescence markers using CURRENT as
564 # precursor (successors markers of CURRENT).
610 # precursor (successors markers of CURRENT).
565 #
611 #
566 # Having multiple entry here means divergence.
612 # Having multiple entry here means divergence.
567 #
613 #
568 # - The second one handles successors defined in each marker.
614 # - The second one handles successors defined in each marker.
569 #
615 #
570 # Having none means pruned node, multiple successors means split,
616 # Having none means pruned node, multiple successors means split,
571 # single successors are standard replacement.
617 # single successors are standard replacement.
572 #
618 #
573 for mark in sorted(succmarkers[current]):
619 for mark in sorted(succmarkers[current]):
574 for suc in mark[1]:
620 for suc in mark[1]:
575 if suc not in cache:
621 if suc not in cache:
576 if suc in stackedset:
622 if suc in stackedset:
577 # cycle breaking
623 # cycle breaking
578 cache[suc] = []
624 cache[suc] = []
579 else:
625 else:
580 # case (3) If we have not computed successors sets
626 # case (3) If we have not computed successors sets
581 # of one of those successors we add it to the
627 # of one of those successors we add it to the
582 # `toproceed` stack and stop all work for this
628 # `toproceed` stack and stop all work for this
583 # iteration.
629 # iteration.
584 toproceed.append(suc)
630 toproceed.append(suc)
585 stackedset.add(suc)
631 stackedset.add(suc)
586 break
632 break
587 else:
633 else:
588 continue
634 continue
589 break
635 break
590 else:
636 else:
591 # case (4): we know all successors sets of all direct
637 # case (4): we know all successors sets of all direct
592 # successors
638 # successors
593 #
639 #
594 # Successors set contributed by each marker depends on the
640 # Successors set contributed by each marker depends on the
595 # successors sets of all its "successors" node.
641 # successors sets of all its "successors" node.
596 #
642 #
597 # Each different marker is a divergence in the obsolescence
643 # Each different marker is a divergence in the obsolescence
598 # history. It contributes successors sets distinct from other
644 # history. It contributes successors sets distinct from other
599 # markers.
645 # markers.
600 #
646 #
601 # Within a marker, a successor may have divergent successors
647 # Within a marker, a successor may have divergent successors
602 # sets. In such a case, the marker will contribute multiple
648 # sets. In such a case, the marker will contribute multiple
603 # divergent successors sets. If multiple successors have
649 # divergent successors sets. If multiple successors have
604 # divergent successors sets, a Cartesian product is used.
650 # divergent successors sets, a Cartesian product is used.
605 #
651 #
606 # At the end we post-process successors sets to remove
652 # At the end we post-process successors sets to remove
607 # duplicated entry and successors set that are strict subset of
653 # duplicated entry and successors set that are strict subset of
608 # another one.
654 # another one.
609 succssets = []
655 succssets = []
610 for mark in sorted(succmarkers[current]):
656 for mark in sorted(succmarkers[current]):
611 # successors sets contributed by this marker
657 # successors sets contributed by this marker
612 base = _succs()
658 base = _succs()
613 base.markers.add(mark)
659 base.markers.add(mark)
614 markss = [base]
660 markss = [base]
615 for suc in mark[1]:
661 for suc in mark[1]:
616 # cardinal product with previous successors
662 # cardinal product with previous successors
617 productresult = []
663 productresult = []
618 for prefix in markss:
664 for prefix in markss:
619 for suffix in cache[suc]:
665 for suffix in cache[suc]:
620 newss = prefix.copy()
666 newss = prefix.copy()
621 newss.markers.update(suffix.markers)
667 newss.markers.update(suffix.markers)
622 for part in suffix:
668 for part in suffix:
623 # do not duplicated entry in successors set
669 # do not duplicated entry in successors set
624 # first entry wins.
670 # first entry wins.
625 if part not in newss:
671 if part not in newss:
626 newss.append(part)
672 newss.append(part)
627 productresult.append(newss)
673 productresult.append(newss)
628 markss = productresult
674 markss = productresult
629 succssets.extend(markss)
675 succssets.extend(markss)
630 # remove duplicated and subset
676 # remove duplicated and subset
631 seen = []
677 seen = []
632 final = []
678 final = []
633 candidates = sorted((s for s in succssets if s),
679 candidates = sorted((s for s in succssets if s),
634 key=len, reverse=True)
680 key=len, reverse=True)
635 for cand in candidates:
681 for cand in candidates:
636 for seensuccs in seen:
682 for seensuccs in seen:
637 if cand.canmerge(seensuccs):
683 if cand.canmerge(seensuccs):
638 seensuccs.markers.update(cand.markers)
684 seensuccs.markers.update(cand.markers)
639 break
685 break
640 else:
686 else:
641 final.append(cand)
687 final.append(cand)
642 seen.append(cand)
688 seen.append(cand)
643 final.reverse() # put small successors set first
689 final.reverse() # put small successors set first
644 cache[current] = final
690 cache[current] = final
645 return cache[initialnode]
691 return cache[initialnode]
646
692
647 def successorsandmarkers(repo, ctx):
693 def successorsandmarkers(repo, ctx):
648 """compute the raw data needed for computing obsfate
694 """compute the raw data needed for computing obsfate
649 Returns a list of dict, one dict per successors set
695 Returns a list of dict, one dict per successors set
650 """
696 """
651 if not ctx.obsolete():
697 if not ctx.obsolete():
652 return None
698 return None
653
699
654 ssets = successorssets(repo, ctx.node(), closest=True)
700 ssets = successorssets(repo, ctx.node(), closest=True)
655
701
656 # closestsuccessors returns an empty list for pruned revisions, remap it
702 # closestsuccessors returns an empty list for pruned revisions, remap it
657 # into a list containing an empty list for future processing
703 # into a list containing an empty list for future processing
658 if ssets == []:
704 if ssets == []:
659 ssets = [[]]
705 ssets = [[]]
660
706
661 # Try to recover pruned markers
707 # Try to recover pruned markers
662 succsmap = repo.obsstore.successors
708 succsmap = repo.obsstore.successors
663 fullsuccessorsets = [] # successor set + markers
709 fullsuccessorsets = [] # successor set + markers
664 for sset in ssets:
710 for sset in ssets:
665 if sset:
711 if sset:
666 fullsuccessorsets.append(sset)
712 fullsuccessorsets.append(sset)
667 else:
713 else:
668 # successorsset return an empty set() when ctx or one of its
714 # successorsset return an empty set() when ctx or one of its
669 # successors is pruned.
715 # successors is pruned.
670 # In this case, walk the obs-markers tree again starting with ctx
716 # In this case, walk the obs-markers tree again starting with ctx
671 # and find the relevant pruning obs-makers, the ones without
717 # and find the relevant pruning obs-makers, the ones without
672 # successors.
718 # successors.
673 # Having these markers allow us to compute some information about
719 # Having these markers allow us to compute some information about
674 # its fate, like who pruned this changeset and when.
720 # its fate, like who pruned this changeset and when.
675
721
676 # XXX we do not catch all prune markers (eg rewritten then pruned)
722 # XXX we do not catch all prune markers (eg rewritten then pruned)
677 # (fix me later)
723 # (fix me later)
678 foundany = False
724 foundany = False
679 for mark in succsmap.get(ctx.node(), ()):
725 for mark in succsmap.get(ctx.node(), ()):
680 if not mark[1]:
726 if not mark[1]:
681 foundany = True
727 foundany = True
682 sset = _succs()
728 sset = _succs()
683 sset.markers.add(mark)
729 sset.markers.add(mark)
684 fullsuccessorsets.append(sset)
730 fullsuccessorsets.append(sset)
685 if not foundany:
731 if not foundany:
686 fullsuccessorsets.append(_succs())
732 fullsuccessorsets.append(_succs())
687
733
688 values = []
734 values = []
689 for sset in fullsuccessorsets:
735 for sset in fullsuccessorsets:
690 values.append({'successors': sset, 'markers': sset.markers})
736 values.append({'successors': sset, 'markers': sset.markers})
691
737
692 return values
738 return values
693
739
694 def successorsetverb(successorset):
740 def successorsetverb(successorset):
695 """ Return the verb summarizing the successorset
741 """ Return the verb summarizing the successorset
696 """
742 """
697 if not successorset:
743 if not successorset:
698 verb = 'pruned'
744 verb = 'pruned'
699 elif len(successorset) == 1:
745 elif len(successorset) == 1:
700 verb = 'rewritten'
746 verb = 'rewritten'
701 else:
747 else:
702 verb = 'split'
748 verb = 'split'
703 return verb
749 return verb
704
750
705 def markersdates(markers):
751 def markersdates(markers):
706 """returns the list of dates for a list of markers
752 """returns the list of dates for a list of markers
707 """
753 """
708 return [m[4] for m in markers]
754 return [m[4] for m in markers]
709
755
710 def markersusers(markers):
756 def markersusers(markers):
711 """ Returns a sorted list of markers users without duplicates
757 """ Returns a sorted list of markers users without duplicates
712 """
758 """
713 markersmeta = [dict(m[3]) for m in markers]
759 markersmeta = [dict(m[3]) for m in markers]
714 users = set(meta.get('user') for meta in markersmeta if meta.get('user'))
760 users = set(meta.get('user') for meta in markersmeta if meta.get('user'))
715
761
716 return sorted(users)
762 return sorted(users)
717
763
718 def markersoperations(markers):
764 def markersoperations(markers):
719 """ Returns a sorted list of markers operations without duplicates
765 """ Returns a sorted list of markers operations without duplicates
720 """
766 """
721 markersmeta = [dict(m[3]) for m in markers]
767 markersmeta = [dict(m[3]) for m in markers]
722 operations = set(meta.get('operation') for meta in markersmeta
768 operations = set(meta.get('operation') for meta in markersmeta
723 if meta.get('operation'))
769 if meta.get('operation'))
724
770
725 return sorted(operations)
771 return sorted(operations)
@@ -1,167 +1,167
1 Test the 'effect-flags' feature
1 Test the 'effect-flags' feature
2
2
3 Global setup
3 Global setup
4 ============
4 ============
5
5
6 $ . $TESTDIR/testlib/obsmarker-common.sh
6 $ . $TESTDIR/testlib/obsmarker-common.sh
7 $ cat >> $HGRCPATH <<EOF
7 $ cat >> $HGRCPATH <<EOF
8 > [ui]
8 > [ui]
9 > interactive = true
9 > interactive = true
10 > [phases]
10 > [phases]
11 > publish=False
11 > publish=False
12 > [extensions]
12 > [extensions]
13 > rebase =
13 > rebase =
14 > [experimental]
14 > [experimental]
15 > evolution = all
15 > evolution = all
16 > effect-flags = 1
16 > effect-flags = 1
17 > EOF
17 > EOF
18
18
19 $ hg init $TESTTMP/effect-flags
19 $ hg init $TESTTMP/effect-flags
20 $ cd $TESTTMP/effect-flags
20 $ cd $TESTTMP/effect-flags
21 $ mkcommit ROOT
21 $ mkcommit ROOT
22
22
23 amend touching the description only
23 amend touching the description only
24 -----------------------------------
24 -----------------------------------
25
25
26 $ mkcommit A0
26 $ mkcommit A0
27 $ hg commit --amend -m "A1"
27 $ hg commit --amend -m "A1"
28
28
29 check result
29 check result
30
30
31 $ hg debugobsolete --rev .
31 $ hg debugobsolete --rev .
32 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
32 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
33
33
34 amend touching the user only
34 amend touching the user only
35 ----------------------------
35 ----------------------------
36
36
37 $ mkcommit B0
37 $ mkcommit B0
38 $ hg commit --amend -u "bob <bob@bob.com>"
38 $ hg commit --amend -u "bob <bob@bob.com>"
39
39
40 check result
40 check result
41
41
42 $ hg debugobsolete --rev .
42 $ hg debugobsolete --rev .
43 ef4a313b1e0ade55718395d80e6b88c5ccd875eb 5485c92d34330dac9d7a63dc07e1e3373835b964 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '16', 'operation': 'amend', 'user': 'test'}
43 ef4a313b1e0ade55718395d80e6b88c5ccd875eb 5485c92d34330dac9d7a63dc07e1e3373835b964 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '16', 'operation': 'amend', 'user': 'test'}
44
44
45 amend touching the date only
45 amend touching the date only
46 ----------------------------
46 ----------------------------
47
47
48 $ mkcommit B1
48 $ mkcommit B1
49 $ hg commit --amend -d "42 0"
49 $ hg commit --amend -d "42 0"
50
50
51 check result
51 check result
52
52
53 $ hg debugobsolete --rev .
53 $ hg debugobsolete --rev .
54 2ef0680ff45038ac28c9f1ff3644341f54487280 4dd84345082e9e5291c2e6b3f335bbf8bf389378 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '32', 'operation': 'amend', 'user': 'test'}
54 2ef0680ff45038ac28c9f1ff3644341f54487280 4dd84345082e9e5291c2e6b3f335bbf8bf389378 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '32', 'operation': 'amend', 'user': 'test'}
55
55
56 amend touching the branch only
56 amend touching the branch only
57 ----------------------------
57 ----------------------------
58
58
59 $ mkcommit B2
59 $ mkcommit B2
60 $ hg branch my-branch
60 $ hg branch my-branch
61 marked working directory as branch my-branch
61 marked working directory as branch my-branch
62 (branches are permanent and global, did you want a bookmark?)
62 (branches are permanent and global, did you want a bookmark?)
63 $ hg commit --amend
63 $ hg commit --amend
64
64
65 check result
65 check result
66
66
67 $ hg debugobsolete --rev .
67 $ hg debugobsolete --rev .
68 bd3db8264ceebf1966319f5df3be7aac6acd1a8e 14a01456e0574f0e0a0b15b2345486a6364a8d79 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '64', 'operation': 'amend', 'user': 'test'}
68 bd3db8264ceebf1966319f5df3be7aac6acd1a8e 14a01456e0574f0e0a0b15b2345486a6364a8d79 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '64', 'operation': 'amend', 'user': 'test'}
69
69
70 $ hg up default
70 $ hg up default
71 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
71 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
72
72
73 rebase (parents change)
73 rebase (parents change)
74 -----------------------
74 -----------------------
75
75
76 $ mkcommit C0
76 $ mkcommit C0
77 $ mkcommit D0
77 $ mkcommit D0
78 $ hg rebase -r . -d 'desc(B0)'
78 $ hg rebase -r . -d 'desc(B0)'
79 rebasing 10:c85eff83a034 "D0" (tip)
79 rebasing 10:c85eff83a034 "D0" (tip)
80
80
81 check result
81 check result
82
82
83 $ hg debugobsolete --rev .
83 $ hg debugobsolete --rev .
84 c85eff83a0340efd9da52b806a94c350222f3371 da86aa2f19a30d6686b15cae15c7b6c908ec9699 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
84 c85eff83a0340efd9da52b806a94c350222f3371 da86aa2f19a30d6686b15cae15c7b6c908ec9699 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
85
85
86 amend touching the diff
86 amend touching the diff
87 -----------------------
87 -----------------------
88
88
89 $ mkcommit E0
89 $ mkcommit E0
90 $ echo 42 >> E0
90 $ echo 42 >> E0
91 $ hg commit --amend
91 $ hg commit --amend
92
92
93 check result
93 check result
94
94
95 $ hg debugobsolete --rev .
95 $ hg debugobsolete --rev .
96 ebfe0333e0d96f68a917afd97c0a0af87f1c3b5f 75781fdbdbf58a987516b00c980bccda1e9ae588 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'amend', 'user': 'test'}
96 ebfe0333e0d96f68a917afd97c0a0af87f1c3b5f 75781fdbdbf58a987516b00c980bccda1e9ae588 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
97
97
98 amend with multiple effect (desc and meta)
98 amend with multiple effect (desc and meta)
99 -------------------------------------------
99 -------------------------------------------
100
100
101 $ mkcommit F0
101 $ mkcommit F0
102 $ hg branch my-other-branch
102 $ hg branch my-other-branch
103 marked working directory as branch my-other-branch
103 marked working directory as branch my-other-branch
104 $ hg commit --amend -m F1 -u "bob <bob@bob.com>" -d "42 0"
104 $ hg commit --amend -m F1 -u "bob <bob@bob.com>" -d "42 0"
105
105
106 check result
106 check result
107
107
108 $ hg debugobsolete --rev .
108 $ hg debugobsolete --rev .
109 fad47e5bd78e6aa4db1b5a0a1751bc12563655ff a94e0fd5f1c81d969381a76eb0d37ce499a44fae 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '113', 'operation': 'amend', 'user': 'test'}
109 fad47e5bd78e6aa4db1b5a0a1751bc12563655ff a94e0fd5f1c81d969381a76eb0d37ce499a44fae 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '113', 'operation': 'amend', 'user': 'test'}
110
110
111 rebase not touching the diff
111 rebase not touching the diff
112 ----------------------------
112 ----------------------------
113
113
114 $ cat << EOF > H0
114 $ cat << EOF > H0
115 > 0
115 > 0
116 > 1
116 > 1
117 > 2
117 > 2
118 > 3
118 > 3
119 > 4
119 > 4
120 > 5
120 > 5
121 > 6
121 > 6
122 > 7
122 > 7
123 > 8
123 > 8
124 > 9
124 > 9
125 > 10
125 > 10
126 > EOF
126 > EOF
127 $ hg add H0
127 $ hg add H0
128 $ hg commit -m 'H0'
128 $ hg commit -m 'H0'
129 $ echo "H1" >> H0
129 $ echo "H1" >> H0
130 $ hg commit -m "H1"
130 $ hg commit -m "H1"
131 $ hg up -r "desc(H0)"
131 $ hg up -r "desc(H0)"
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 $ cat << EOF > H0
133 $ cat << EOF > H0
134 > H2
134 > H2
135 > 0
135 > 0
136 > 1
136 > 1
137 > 2
137 > 2
138 > 3
138 > 3
139 > 4
139 > 4
140 > 5
140 > 5
141 > 6
141 > 6
142 > 7
142 > 7
143 > 8
143 > 8
144 > 9
144 > 9
145 > 10
145 > 10
146 > EOF
146 > EOF
147 $ hg commit -m "H2"
147 $ hg commit -m "H2"
148 created new head
148 created new head
149 $ hg rebase -s "desc(H1)" -d "desc(H2)" -t :merge3
149 $ hg rebase -s "desc(H1)" -d "desc(H2)" -t :merge3
150 rebasing 17:b57fed8d8322 "H1"
150 rebasing 17:b57fed8d8322 "H1"
151 merging H0
151 merging H0
152 $ hg debugobsolete -r tip
152 $ hg debugobsolete -r tip
153 b57fed8d83228a8ae3748d8c3760a77638dd4f8c e509e2eb3df5d131ff7c02350bf2a9edd0c09478 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
153 b57fed8d83228a8ae3748d8c3760a77638dd4f8c e509e2eb3df5d131ff7c02350bf2a9edd0c09478 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
154
154
155 amend closing the branch should be detected as meta change
155 amend closing the branch should be detected as meta change
156 ----------------------------------------------------------
156 ----------------------------------------------------------
157
157
158 $ hg branch closedbranch
158 $ hg branch closedbranch
159 marked working directory as branch closedbranch
159 marked working directory as branch closedbranch
160 $ mkcommit G0
160 $ mkcommit G0
161 $ mkcommit I0
161 $ mkcommit I0
162 $ hg commit --amend --close-branch
162 $ hg commit --amend --close-branch
163
163
164 check result
164 check result
165
165
166 $ hg debugobsolete -r .
166 $ hg debugobsolete -r .
167 2f599e54c1c6974299065cdf54e1ad640bfb7b5d 12c6238b5e371eea00fd2013b12edce3f070928b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '2', 'operation': 'amend', 'user': 'test'}
167 2f599e54c1c6974299065cdf54e1ad640bfb7b5d 12c6238b5e371eea00fd2013b12edce3f070928b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '2', 'operation': 'amend', 'user': 'test'}
General Comments 0
You need to be logged in to leave comments. Login now