effectflag: detect when parents changed...
Boris Feld
r34420:fa26f589 default
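Editorial note: this changeset extends the effect-flag mechanism so that obsolescence markers also record when a rewrite changed a changeset's parents. The 'ef1' metadata value is a bitfield built from the constants defined in obsutil.py below; bit 2 (PARENTCHANGED) is the new one. A minimal standalone sketch of how such a value decodes (the constants are copied from the diff; the decoding helper itself is only illustrative, not part of Mercurial):

    # Effect-flag bits as defined in obsutil.py below; bits 1 and 3 are unused
    # at this point in the series.
    DESCCHANGED = 1 << 0    # the description changed
    PARENTCHANGED = 1 << 2  # the parents changed (added by this changeset)
    USERCHANGED = 1 << 4    # the user changed
    DATECHANGED = 1 << 5    # the date changed
    BRANCHCHANGED = 1 << 6  # the branch changed

    _NAMES = {
        DESCCHANGED: 'description',
        PARENTCHANGED: 'parents',
        USERCHANGED: 'user',
        DATECHANGED: 'date',
        BRANCHCHANGED: 'branch',
    }

    def describe_ef1(value):
        """Return the kinds of change encoded in an ef1 value."""
        return [name for bit, name in sorted(_NAMES.items()) if value & bit]

    # ef1=4 is what the rebase tests below now record (only the parents moved);
    # ef1=113 combines description, user, date and branch (1 + 16 + 32 + 64).
    assert describe_ef1(4) == ['parents']
    assert describe_ef1(113) == ['description', 'user', 'date', 'branch']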
@@ -1,692 +1,697 b''
1 # obsutil.py - utility functions for obsolescence
1 # obsutil.py - utility functions for obsolescence
2 #
2 #
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import (
10 from . import (
11 phases,
11 phases,
12 util
12 util
13 )
13 )
14
14
15 class marker(object):
15 class marker(object):
16 """Wrap obsolete marker raw data"""
16 """Wrap obsolete marker raw data"""
17
17
18 def __init__(self, repo, data):
18 def __init__(self, repo, data):
19 # the repo argument will be used to create changectx in later version
19 # the repo argument will be used to create changectx in later version
20 self._repo = repo
20 self._repo = repo
21 self._data = data
21 self._data = data
22 self._decodedmeta = None
22 self._decodedmeta = None
23
23
24 def __hash__(self):
24 def __hash__(self):
25 return hash(self._data)
25 return hash(self._data)
26
26
27 def __eq__(self, other):
27 def __eq__(self, other):
28 if type(other) != type(self):
28 if type(other) != type(self):
29 return False
29 return False
30 return self._data == other._data
30 return self._data == other._data
31
31
32 def precnode(self):
32 def precnode(self):
33 msg = ("'marker.precnode' is deprecated, "
33 msg = ("'marker.precnode' is deprecated, "
34 "use 'marker.prednode'")
34 "use 'marker.prednode'")
35 util.nouideprecwarn(msg, '4.4')
35 util.nouideprecwarn(msg, '4.4')
36 return self.prednode()
36 return self.prednode()
37
37
38 def prednode(self):
38 def prednode(self):
39 """Predecessor changeset node identifier"""
39 """Predecessor changeset node identifier"""
40 return self._data[0]
40 return self._data[0]
41
41
42 def succnodes(self):
42 def succnodes(self):
43 """List of successor changesets node identifiers"""
43 """List of successor changesets node identifiers"""
44 return self._data[1]
44 return self._data[1]
45
45
46 def parentnodes(self):
46 def parentnodes(self):
47 """Parents of the predecessors (None if not recorded)"""
47 """Parents of the predecessors (None if not recorded)"""
48 return self._data[5]
48 return self._data[5]
49
49
50 def metadata(self):
50 def metadata(self):
51 """Decoded metadata dictionary"""
51 """Decoded metadata dictionary"""
52 return dict(self._data[3])
52 return dict(self._data[3])
53
53
54 def date(self):
54 def date(self):
55 """Creation date as (unixtime, offset)"""
55 """Creation date as (unixtime, offset)"""
56 return self._data[4]
56 return self._data[4]
57
57
58 def flags(self):
58 def flags(self):
59 """The flags field of the marker"""
59 """The flags field of the marker"""
60 return self._data[2]
60 return self._data[2]
61
61
62 def getmarkers(repo, nodes=None, exclusive=False):
62 def getmarkers(repo, nodes=None, exclusive=False):
63 """returns markers known in a repository
63 """returns markers known in a repository
64
64
65 If <nodes> is specified, only markers "relevant" to those nodes are
65 If <nodes> is specified, only markers "relevant" to those nodes are
66 returned"""
66 returned"""
67 if nodes is None:
67 if nodes is None:
68 rawmarkers = repo.obsstore
68 rawmarkers = repo.obsstore
69 elif exclusive:
69 elif exclusive:
70 rawmarkers = exclusivemarkers(repo, nodes)
70 rawmarkers = exclusivemarkers(repo, nodes)
71 else:
71 else:
72 rawmarkers = repo.obsstore.relevantmarkers(nodes)
72 rawmarkers = repo.obsstore.relevantmarkers(nodes)
73
73
74 for markerdata in rawmarkers:
74 for markerdata in rawmarkers:
75 yield marker(repo, markerdata)
75 yield marker(repo, markerdata)
76
76
77 def closestpredecessors(repo, nodeid):
77 def closestpredecessors(repo, nodeid):
78 """yield the list of next predecessors pointing on visible changectx nodes
78 """yield the list of next predecessors pointing on visible changectx nodes
79
79
80 This function respects the repoview filtering; filtered revisions will be
80 This function respects the repoview filtering; filtered revisions will be
81 considered missing.
81 considered missing.
82 """
82 """
83
83
84 precursors = repo.obsstore.predecessors
84 precursors = repo.obsstore.predecessors
85 stack = [nodeid]
85 stack = [nodeid]
86 seen = set(stack)
86 seen = set(stack)
87
87
88 while stack:
88 while stack:
89 current = stack.pop()
89 current = stack.pop()
90 currentpreccs = precursors.get(current, ())
90 currentpreccs = precursors.get(current, ())
91
91
92 for prec in currentpreccs:
92 for prec in currentpreccs:
93 precnodeid = prec[0]
93 precnodeid = prec[0]
94
94
95 # Basic cycle protection
95 # Basic cycle protection
96 if precnodeid in seen:
96 if precnodeid in seen:
97 continue
97 continue
98 seen.add(precnodeid)
98 seen.add(precnodeid)
99
99
100 if precnodeid in repo:
100 if precnodeid in repo:
101 yield precnodeid
101 yield precnodeid
102 else:
102 else:
103 stack.append(precnodeid)
103 stack.append(precnodeid)
104
104
105 def allprecursors(*args, **kwargs):
105 def allprecursors(*args, **kwargs):
106 """ (DEPRECATED)
106 """ (DEPRECATED)
107 """
107 """
108 msg = ("'obsutil.allprecursors' is deprecated, "
108 msg = ("'obsutil.allprecursors' is deprecated, "
109 "use 'obsutil.allpredecessors'")
109 "use 'obsutil.allpredecessors'")
110 util.nouideprecwarn(msg, '4.4')
110 util.nouideprecwarn(msg, '4.4')
111
111
112 return allpredecessors(*args, **kwargs)
112 return allpredecessors(*args, **kwargs)
113
113
114 def allpredecessors(obsstore, nodes, ignoreflags=0):
114 def allpredecessors(obsstore, nodes, ignoreflags=0):
115 """Yield node for every precursors of <nodes>.
115 """Yield node for every precursors of <nodes>.
116
116
117 Some precursors may be unknown locally.
117 Some precursors may be unknown locally.
118
118
119 This is a linear yield unsuited to detecting folded changesets. It includes
119 This is a linear yield unsuited to detecting folded changesets. It includes
120 initial nodes too."""
120 initial nodes too."""
121
121
122 remaining = set(nodes)
122 remaining = set(nodes)
123 seen = set(remaining)
123 seen = set(remaining)
124 while remaining:
124 while remaining:
125 current = remaining.pop()
125 current = remaining.pop()
126 yield current
126 yield current
127 for mark in obsstore.predecessors.get(current, ()):
127 for mark in obsstore.predecessors.get(current, ()):
128 # ignore marker flagged with specified flag
128 # ignore marker flagged with specified flag
129 if mark[2] & ignoreflags:
129 if mark[2] & ignoreflags:
130 continue
130 continue
131 suc = mark[0]
131 suc = mark[0]
132 if suc not in seen:
132 if suc not in seen:
133 seen.add(suc)
133 seen.add(suc)
134 remaining.add(suc)
134 remaining.add(suc)
135
135
136 def allsuccessors(obsstore, nodes, ignoreflags=0):
136 def allsuccessors(obsstore, nodes, ignoreflags=0):
137 """Yield node for every successor of <nodes>.
137 """Yield node for every successor of <nodes>.
138
138
139 Some successors may be unknown locally.
139 Some successors may be unknown locally.
140
140
141 This is a linear yield unsuited to detecting split changesets. It includes
141 This is a linear yield unsuited to detecting split changesets. It includes
142 initial nodes too."""
142 initial nodes too."""
143 remaining = set(nodes)
143 remaining = set(nodes)
144 seen = set(remaining)
144 seen = set(remaining)
145 while remaining:
145 while remaining:
146 current = remaining.pop()
146 current = remaining.pop()
147 yield current
147 yield current
148 for mark in obsstore.successors.get(current, ()):
148 for mark in obsstore.successors.get(current, ()):
149 # ignore marker flagged with specified flag
149 # ignore marker flagged with specified flag
150 if mark[2] & ignoreflags:
150 if mark[2] & ignoreflags:
151 continue
151 continue
152 for suc in mark[1]:
152 for suc in mark[1]:
153 if suc not in seen:
153 if suc not in seen:
154 seen.add(suc)
154 seen.add(suc)
155 remaining.add(suc)
155 remaining.add(suc)
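Editorial note: to make the shape of the data that allsuccessors() walks concrete, here is a tiny self-contained toy example over a hand-built successors mapping. Only the marker fields the function actually reads are meaningful (marker[1] is the tuple of successors, marker[2] the flags); the node names and the fakeobsstore class are invented for illustration:

    # Toy demonstration of allsuccessors(); assumes obsutil is importable.
    from mercurial import obsutil

    class fakeobsstore(object):
        """Minimal stand-in exposing only the 'successors' mapping."""
        def __init__(self, successors):
            self.successors = successors

    store = fakeobsstore({
        'A': [('A', ('B',), 0, (), (0, 0), None)],      # A rewritten as B
        'B': [('B', ('C', 'D'), 0, (), (0, 0), None)],  # B split into C and D
    })

    # The walk yields the initial node plus everything reachable through markers.
    assert set(obsutil.allsuccessors(store, ['A'])) == {'A', 'B', 'C', 'D'}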
156
156
157 def _filterprunes(markers):
157 def _filterprunes(markers):
158 """return a set with no prune markers"""
158 """return a set with no prune markers"""
159 return set(m for m in markers if m[1])
159 return set(m for m in markers if m[1])
160
160
161 def exclusivemarkers(repo, nodes):
161 def exclusivemarkers(repo, nodes):
162 """set of markers relevant to "nodes" but no other locally-known nodes
162 """set of markers relevant to "nodes" but no other locally-known nodes
163
163
164 This function computes the set of markers "exclusive" to a locally-known
164 This function computes the set of markers "exclusive" to a locally-known
165 node. This means we walk the markers starting from <nodes> until we reach a
165 node. This means we walk the markers starting from <nodes> until we reach a
166 locally-known precursor outside of <nodes>. Elements of <nodes> with
166 locally-known precursor outside of <nodes>. Elements of <nodes> with
167 locally-known successors outside of <nodes> are ignored (since their
167 locally-known successors outside of <nodes> are ignored (since their
168 precursor markers are also relevant to these successors).
168 precursor markers are also relevant to these successors).
169
169
170 For example:
170 For example:
171
171
172 # (A0 rewritten as A1)
172 # (A0 rewritten as A1)
173 #
173 #
174 # A0 <-1- A1 # Marker "1" is exclusive to A1
174 # A0 <-1- A1 # Marker "1" is exclusive to A1
175
175
176 or
176 or
177
177
178 # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
178 # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
179 #
179 #
180 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
180 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
181
181
182 or
182 or
183
183
184 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
184 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
185 #
185 #
186 # <-2- A1 # Marker "2" is exclusive to A0,A1
186 # <-2- A1 # Marker "2" is exclusive to A0,A1
187 # /
187 # /
188 # <-1- A0
188 # <-1- A0
189 # \
189 # \
190 # <-3- A2 # Marker "3" is exclusive to A0,A2
190 # <-3- A2 # Marker "3" is exclusive to A0,A2
191 #
191 #
192 # in addition:
192 # in addition:
193 #
193 #
194 # Markers "2,3" are exclusive to A1,A2
194 # Markers "2,3" are exclusive to A1,A2
195 # Markers "1,2,3" are exclusive to A0,A1,A2
195 # Markers "1,2,3" are exclusive to A0,A1,A2
196
196
197 See test/test-obsolete-bundle-strip.t for more examples.
197 See test/test-obsolete-bundle-strip.t for more examples.
198
198
199 An example usage is strip. When stripping a changeset, we also want to
199 An example usage is strip. When stripping a changeset, we also want to
200 strip the markers exclusive to this changeset. Otherwise we would have
200 strip the markers exclusive to this changeset. Otherwise we would have
201 "dangling"" obsolescence markers from its precursors: Obsolescence markers
201 "dangling"" obsolescence markers from its precursors: Obsolescence markers
202 marking a node as obsolete without any successors available locally.
202 marking a node as obsolete without any successors available locally.
203
203
204 As for relevant markers, the prune markers for children will be followed.
204 As for relevant markers, the prune markers for children will be followed.
205 Of course, they will only be followed if the pruned child is
205 Of course, they will only be followed if the pruned child is
206 locally-known, since the prune markers are relevant to the pruned node.
206 locally-known, since the prune markers are relevant to the pruned node.
207 However, while prune markers are considered relevant to the parent of the
207 However, while prune markers are considered relevant to the parent of the
208 pruned changesets, prune markers for locally-known changesets (with no
208 pruned changesets, prune markers for locally-known changesets (with no
209 successors) are considered exclusive to the pruned nodes. This allows
209 successors) are considered exclusive to the pruned nodes. This allows
210 stripping the prune markers (with the rest of the exclusive chain) alongside
210 stripping the prune markers (with the rest of the exclusive chain) alongside
211 the pruned changesets.
211 the pruned changesets.
212 """
212 """
213 # running on a filtered repository would be dangerous as markers could be
213 # running on a filtered repository would be dangerous as markers could be
214 # reported as exclusive when they are relevant for other filtered nodes.
214 # reported as exclusive when they are relevant for other filtered nodes.
215 unfi = repo.unfiltered()
215 unfi = repo.unfiltered()
216
216
217 # shortcuts to various useful items
217 # shortcuts to various useful items
218 nm = unfi.changelog.nodemap
218 nm = unfi.changelog.nodemap
219 precursorsmarkers = unfi.obsstore.predecessors
219 precursorsmarkers = unfi.obsstore.predecessors
220 successormarkers = unfi.obsstore.successors
220 successormarkers = unfi.obsstore.successors
221 childrenmarkers = unfi.obsstore.children
221 childrenmarkers = unfi.obsstore.children
222
222
223 # exclusive markers (return of the function)
223 # exclusive markers (return of the function)
224 exclmarkers = set()
224 exclmarkers = set()
225 # we need fast membership testing
225 # we need fast membership testing
226 nodes = set(nodes)
226 nodes = set(nodes)
227 # looking for head in the obshistory
227 # looking for head in the obshistory
228 #
228 #
229 # XXX we are ignoring all issues regarding cycles for now.
229 # XXX we are ignoring all issues regarding cycles for now.
230 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
230 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
231 stack.sort()
231 stack.sort()
232 # nodes already stacked
232 # nodes already stacked
233 seennodes = set(stack)
233 seennodes = set(stack)
234 while stack:
234 while stack:
235 current = stack.pop()
235 current = stack.pop()
236 # fetch precursors markers
236 # fetch precursors markers
237 markers = list(precursorsmarkers.get(current, ()))
237 markers = list(precursorsmarkers.get(current, ()))
238 # extend the list with prune markers
238 # extend the list with prune markers
239 for mark in successormarkers.get(current, ()):
239 for mark in successormarkers.get(current, ()):
240 if not mark[1]:
240 if not mark[1]:
241 markers.append(mark)
241 markers.append(mark)
242 # and markers from children (looking for prune)
242 # and markers from children (looking for prune)
243 for mark in childrenmarkers.get(current, ()):
243 for mark in childrenmarkers.get(current, ()):
244 if not mark[1]:
244 if not mark[1]:
245 markers.append(mark)
245 markers.append(mark)
246 # traverse the markers
246 # traverse the markers
247 for mark in markers:
247 for mark in markers:
248 if mark in exclmarkers:
248 if mark in exclmarkers:
249 # markers already selected
249 # markers already selected
250 continue
250 continue
251
251
252 # If the marker is about the current node, select it
252 # If the marker is about the current node, select it
253 #
253 #
254 # (this delays the addition of markers from children)
254 # (this delays the addition of markers from children)
255 if mark[1] or mark[0] == current:
255 if mark[1] or mark[0] == current:
256 exclmarkers.add(mark)
256 exclmarkers.add(mark)
257
257
258 # should we keep traversing through the precursors?
258 # should we keep traversing through the precursors?
259 prec = mark[0]
259 prec = mark[0]
260
260
261 # nodes in the stack or already processed
261 # nodes in the stack or already processed
262 if prec in seennodes:
262 if prec in seennodes:
263 continue
263 continue
264
264
265 # is this a locally known node ?
265 # is this a locally known node ?
266 known = prec in nm
266 known = prec in nm
267 # if locally-known and not in the <nodes> set the traversal
267 # if locally-known and not in the <nodes> set the traversal
268 # stop here.
268 # stop here.
269 if known and prec not in nodes:
269 if known and prec not in nodes:
270 continue
270 continue
271
271
272 # do not keep going if there are unselected markers pointing to this
272 # do not keep going if there are unselected markers pointing to this
273 # node. If we end up traversing these unselected markers later the
273 # node. If we end up traversing these unselected markers later the
274 # node will be taken care of at that point.
274 # node will be taken care of at that point.
275 precmarkers = _filterprunes(successormarkers.get(prec))
275 precmarkers = _filterprunes(successormarkers.get(prec))
276 if precmarkers.issubset(exclmarkers):
276 if precmarkers.issubset(exclmarkers):
277 seennodes.add(prec)
277 seennodes.add(prec)
278 stack.append(prec)
278 stack.append(prec)
279
279
280 return exclmarkers
280 return exclmarkers
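Editorial note: the strip scenario described in the docstring above can be explored interactively through getmarkers(repo, nodes, exclusive=True), which delegates to this function. A rough sketch, assuming an in-process repo object and a changeset context ctx supplied by the calling code (for example a small debug command); both are assumptions, not defined in obsutil itself:

    # Sketch only: `repo` and `ctx` are assumptions supplied by the caller.
    from mercurial.node import hex

    node = ctx.node()

    # markers that matter only for this node (and would dangle after a strip)
    exclusive = set(getmarkers(repo, nodes=[node], exclusive=True))

    # compare against every marker relevant to the node
    for m in getmarkers(repo, nodes=[node]):
        kind = 'exclusive' if m in exclusive else 'shared'
        print('%s: %s obsoleted, %d successor(s)'
              % (kind, hex(m.prednode()), len(m.succnodes())))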
281
281
282 def foreground(repo, nodes):
282 def foreground(repo, nodes):
283 """return all nodes in the "foreground" of other node
283 """return all nodes in the "foreground" of other node
284
284
285 The foreground of a revision is anything reachable using parent -> children
285 The foreground of a revision is anything reachable using parent -> children
286 or precursor -> successor relation. It is very similar to "descendant" but
286 or precursor -> successor relation. It is very similar to "descendant" but
287 augmented with obsolescence information.
287 augmented with obsolescence information.
288
288
289 Beware that obsolescence cycles may result in complex situations.
289 Beware that obsolescence cycles may result in complex situations.
290 """
290 """
291 repo = repo.unfiltered()
291 repo = repo.unfiltered()
292 foreground = set(repo.set('%ln::', nodes))
292 foreground = set(repo.set('%ln::', nodes))
293 if repo.obsstore:
293 if repo.obsstore:
294 # We only need this complicated logic if there is obsolescence
294 # We only need this complicated logic if there is obsolescence
295 # XXX will probably deserve an optimised revset.
295 # XXX will probably deserve an optimised revset.
296 nm = repo.changelog.nodemap
296 nm = repo.changelog.nodemap
297 plen = -1
297 plen = -1
298 # compute the whole set of successors or descendants
298 # compute the whole set of successors or descendants
299 while len(foreground) != plen:
299 while len(foreground) != plen:
300 plen = len(foreground)
300 plen = len(foreground)
301 succs = set(c.node() for c in foreground)
301 succs = set(c.node() for c in foreground)
302 mutable = [c.node() for c in foreground if c.mutable()]
302 mutable = [c.node() for c in foreground if c.mutable()]
303 succs.update(allsuccessors(repo.obsstore, mutable))
303 succs.update(allsuccessors(repo.obsstore, mutable))
304 known = (n for n in succs if n in nm)
304 known = (n for n in succs if n in nm)
305 foreground = set(repo.set('%ln::', known))
305 foreground = set(repo.set('%ln::', known))
306 return set(c.node() for c in foreground)
306 return set(c.node() for c in foreground)
307
307
308 # logic around storing and using effect flags
308 # logic around storing and using effect flags
309 EFFECTFLAGFIELD = "ef1"
309 EFFECTFLAGFIELD = "ef1"
310
310
311 DESCCHANGED = 1 << 0 # action changed the description
311 DESCCHANGED = 1 << 0 # action changed the description
312 PARENTCHANGED = 1 << 2 # action changed the parent
312 USERCHANGED = 1 << 4 # the user changed
313 USERCHANGED = 1 << 4 # the user changed
313 DATECHANGED = 1 << 5 # the date changed
314 DATECHANGED = 1 << 5 # the date changed
314 BRANCHCHANGED = 1 << 6 # the branch changed
315 BRANCHCHANGED = 1 << 6 # the branch changed
315
316
316 def geteffectflag(relation):
317 def geteffectflag(relation):
317 """ From an obs-marker relation, compute what changed between the
318 """ From an obs-marker relation, compute what changed between the
318 predecessor and the successor.
319 predecessor and the successor.
319 """
320 """
320 effects = 0
321 effects = 0
321
322
322 source = relation[0]
323 source = relation[0]
323
324
324 for changectx in relation[1]:
325 for changectx in relation[1]:
325 # Check if description has changed
326 # Check if description has changed
326 if changectx.description() != source.description():
327 if changectx.description() != source.description():
327 effects |= DESCCHANGED
328 effects |= DESCCHANGED
328
329
329 # Check if user has changed
330 # Check if user has changed
330 if changectx.user() != source.user():
331 if changectx.user() != source.user():
331 effects |= USERCHANGED
332 effects |= USERCHANGED
332
333
333 # Check if date has changed
334 # Check if date has changed
334 if changectx.date() != source.date():
335 if changectx.date() != source.date():
335 effects |= DATECHANGED
336 effects |= DATECHANGED
336
337
337 # Check if branch has changed
338 # Check if branch has changed
338 if changectx.branch() != source.branch():
339 if changectx.branch() != source.branch():
339 effects |= BRANCHCHANGED
340 effects |= BRANCHCHANGED
340
341
342 # Check if at least one of the parents has changed
343 if changectx.parents() != source.parents():
344 effects |= PARENTCHANGED
345
341 return effects
346 return effects
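Editorial note: the value computed here ends up in the marker metadata under EFFECTFLAGFIELD ("ef1"), which is what the hg debugobsolete output in the test file further down shows. A hedged sketch of the caller side; the actual wiring lives in the marker-creation path, which is not part of this diff, and predctx/succctx stand for changeset contexts supplied by that code:

    # Illustrative only: how a marker-creation helper could attach the flag.
    relation = (predctx, (succctx,))
    effects = geteffectflag(relation)

    metadata = {}
    if effects:
        # stored as a decimal string, e.g. {'ef1': '4'} for a parents-only change
        metadata[EFFECTFLAGFIELD] = "%d" % effects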
342
347
343 def getobsoleted(repo, tr):
348 def getobsoleted(repo, tr):
344 """return the set of pre-existing revisions obsoleted by a transaction"""
349 """return the set of pre-existing revisions obsoleted by a transaction"""
345 torev = repo.unfiltered().changelog.nodemap.get
350 torev = repo.unfiltered().changelog.nodemap.get
346 phase = repo._phasecache.phase
351 phase = repo._phasecache.phase
347 succsmarkers = repo.obsstore.successors.get
352 succsmarkers = repo.obsstore.successors.get
348 public = phases.public
353 public = phases.public
349 addedmarkers = tr.changes.get('obsmarkers')
354 addedmarkers = tr.changes.get('obsmarkers')
350 addedrevs = tr.changes.get('revs')
355 addedrevs = tr.changes.get('revs')
351 seenrevs = set(addedrevs)
356 seenrevs = set(addedrevs)
352 obsoleted = set()
357 obsoleted = set()
353 for mark in addedmarkers:
358 for mark in addedmarkers:
354 node = mark[0]
359 node = mark[0]
355 rev = torev(node)
360 rev = torev(node)
356 if rev is None or rev in seenrevs:
361 if rev is None or rev in seenrevs:
357 continue
362 continue
358 seenrevs.add(rev)
363 seenrevs.add(rev)
359 if phase(repo, rev) == public:
364 if phase(repo, rev) == public:
360 continue
365 continue
361 if set(succsmarkers(node) or []).issubset(addedmarkers):
366 if set(succsmarkers(node) or []).issubset(addedmarkers):
362 obsoleted.add(rev)
367 obsoleted.add(rev)
363 return obsoleted
368 return obsoleted
364
369
365 class _succs(list):
370 class _succs(list):
366 """small class to represent a successors with some metadata about it"""
371 """small class to represent a successors with some metadata about it"""
367
372
368 def __init__(self, *args, **kwargs):
373 def __init__(self, *args, **kwargs):
369 super(_succs, self).__init__(*args, **kwargs)
374 super(_succs, self).__init__(*args, **kwargs)
370 self.markers = set()
375 self.markers = set()
371
376
372 def copy(self):
377 def copy(self):
373 new = _succs(self)
378 new = _succs(self)
374 new.markers = self.markers.copy()
379 new.markers = self.markers.copy()
375 return new
380 return new
376
381
377 @util.propertycache
382 @util.propertycache
378 def _set(self):
383 def _set(self):
379 # immutable
384 # immutable
380 return set(self)
385 return set(self)
381
386
382 def canmerge(self, other):
387 def canmerge(self, other):
383 return self._set.issubset(other._set)
388 return self._set.issubset(other._set)
384
389
385 def successorssets(repo, initialnode, closest=False, cache=None):
390 def successorssets(repo, initialnode, closest=False, cache=None):
386 """Return set of all latest successors of initial nodes
391 """Return set of all latest successors of initial nodes
387
392
388 The successors set of a changeset A is the group of revisions that succeed
393 The successors set of a changeset A is the group of revisions that succeed
389 A. It succeeds A as a consistent whole, each revision being only a partial
394 A. It succeeds A as a consistent whole, each revision being only a partial
390 replacement. By default, the successors set contains non-obsolete
395 replacement. By default, the successors set contains non-obsolete
391 changesets only, walking the obsolescence graph until reaching a leaf. If
396 changesets only, walking the obsolescence graph until reaching a leaf. If
392 'closest' is set to True, closest successors-sets are returned (the
397 'closest' is set to True, closest successors-sets are returned (the
393 obsolescence walk stops on known changesets).
398 obsolescence walk stops on known changesets).
394
399
395 This function returns the full list of successor sets which is why it
400 This function returns the full list of successor sets which is why it
396 returns a list of tuples and not just a single tuple. Each tuple is a valid
401 returns a list of tuples and not just a single tuple. Each tuple is a valid
397 successors set. Note that (A,) may be a valid successors set for changeset A
402 successors set. Note that (A,) may be a valid successors set for changeset A
398 (see below).
403 (see below).
399
404
400 In most cases, a changeset A will have a single element (e.g. the changeset
405 In most cases, a changeset A will have a single element (e.g. the changeset
401 A is replaced by A') in its successors set. Though, it is also common for a
406 A is replaced by A') in its successors set. Though, it is also common for a
402 changeset A to have no elements in its successor set (e.g. the changeset
407 changeset A to have no elements in its successor set (e.g. the changeset
403 has been pruned). Therefore, the returned list of successors sets will be
408 has been pruned). Therefore, the returned list of successors sets will be
404 [(A',)] or [], respectively.
409 [(A',)] or [], respectively.
405
410
406 When a changeset A is split into A' and B', however, it will result in a
411 When a changeset A is split into A' and B', however, it will result in a
407 successors set containing more than a single element, i.e. [(A',B')].
412 successors set containing more than a single element, i.e. [(A',B')].
408 Divergent changesets will result in multiple successors sets, i.e. [(A',),
413 Divergent changesets will result in multiple successors sets, i.e. [(A',),
409 (A'')].
414 (A'')].
410
415
411 If a changeset A is not obsolete, then it will conceptually have no
416 If a changeset A is not obsolete, then it will conceptually have no
412 successors set. To distinguish this from a pruned changeset, the successor
417 successors set. To distinguish this from a pruned changeset, the successor
413 set will contain itself only, i.e. [(A,)].
418 set will contain itself only, i.e. [(A,)].
414
419
415 Finally, final successors unknown locally are considered to be pruned
420 Finally, final successors unknown locally are considered to be pruned
416 (pruned: obsoleted without any successors). (Final: successors not affected
421 (pruned: obsoleted without any successors). (Final: successors not affected
417 by markers).
422 by markers).
418
423
419 The 'closest' mode respects the repoview filtering. For example, without a
424 The 'closest' mode respects the repoview filtering. For example, without a
420 filter it will stop at the first locally-known changeset; with the 'visible'
425 filter it will stop at the first locally-known changeset; with the 'visible'
421 filter it will stop on visible changesets.
426 filter it will stop on visible changesets.
422
427
423 The optional `cache` parameter is a dictionary that may contain
428 The optional `cache` parameter is a dictionary that may contain
424 precomputed successors sets. It is meant to reuse the computation of a
429 precomputed successors sets. It is meant to reuse the computation of a
425 previous call to `successorssets` when multiple calls are made at the same
430 previous call to `successorssets` when multiple calls are made at the same
426 time. The cache dictionary is updated in place. The caller is responsible
431 time. The cache dictionary is updated in place. The caller is responsible
427 for its life span. Code that makes multiple calls to `successorssets`
432 for its life span. Code that makes multiple calls to `successorssets`
428 *should* use this cache mechanism or risk a performance hit.
433 *should* use this cache mechanism or risk a performance hit.
429
434
430 Since results differ depending on the 'closest' mode, the same cache
435 Since results differ depending on the 'closest' mode, the same cache
431 cannot be reused for both modes.
436 cannot be reused for both modes.
432 """
437 """
433
438
434 succmarkers = repo.obsstore.successors
439 succmarkers = repo.obsstore.successors
435
440
436 # Stack of nodes we search successors sets for
441 # Stack of nodes we search successors sets for
437 toproceed = [initialnode]
442 toproceed = [initialnode]
438 # set version of above list for fast loop detection
443 # set version of above list for fast loop detection
439 # element added to "toproceed" must be added here
444 # element added to "toproceed" must be added here
440 stackedset = set(toproceed)
445 stackedset = set(toproceed)
441 if cache is None:
446 if cache is None:
442 cache = {}
447 cache = {}
443
448
444 # This while loop is the flattened version of a recursive search for
449 # This while loop is the flattened version of a recursive search for
445 # successors sets
450 # successors sets
446 #
451 #
447 # def successorssets(x):
452 # def successorssets(x):
448 # successors = directsuccessors(x)
453 # successors = directsuccessors(x)
449 # ss = [[]]
454 # ss = [[]]
450 # for succ in directsuccessors(x):
455 # for succ in directsuccessors(x):
451 # # product as in itertools cartesian product
456 # # product as in itertools cartesian product
452 # ss = product(ss, successorssets(succ))
457 # ss = product(ss, successorssets(succ))
453 # return ss
458 # return ss
454 #
459 #
455 # But we can not use plain recursive calls here:
460 # But we can not use plain recursive calls here:
456 # - that would blow the python call stack
461 # - that would blow the python call stack
457 # - obsolescence markers may have cycles, we need to handle them.
462 # - obsolescence markers may have cycles, we need to handle them.
458 #
463 #
459 # The `toproceed` list acts as our call stack. Every node we search
464 # The `toproceed` list acts as our call stack. Every node we search
460 # successors sets for is stacked there.
465 # successors sets for is stacked there.
461 #
466 #
462 # The `stackedset` is a set version of this stack used to check if a node is
467 # The `stackedset` is a set version of this stack used to check if a node is
463 # already stacked. This check is used to detect cycles and prevent infinite
468 # already stacked. This check is used to detect cycles and prevent infinite
464 # loops.
469 # loops.
465 #
470 #
466 # successors sets of all nodes are stored in the `cache` dictionary.
471 # successors sets of all nodes are stored in the `cache` dictionary.
467 #
472 #
468 # After this while loop ends we use the cache to return the successors sets
473 # After this while loop ends we use the cache to return the successors sets
469 # for the node requested by the caller.
474 # for the node requested by the caller.
470 while toproceed:
475 while toproceed:
471 # Every iteration tries to compute the successors sets of the topmost
476 # Every iteration tries to compute the successors sets of the topmost
472 # node of the stack: CURRENT.
477 # node of the stack: CURRENT.
473 #
478 #
474 # There are four possible outcomes:
479 # There are four possible outcomes:
475 #
480 #
476 # 1) We already know the successors sets of CURRENT:
481 # 1) We already know the successors sets of CURRENT:
477 # -> mission accomplished, pop it from the stack.
482 # -> mission accomplished, pop it from the stack.
478 # 2) Stop the walk:
483 # 2) Stop the walk:
479 # default case: Node is not obsolete
484 # default case: Node is not obsolete
480 # closest case: Node is known at this repo filter level
485 # closest case: Node is known at this repo filter level
481 # -> the node is its own successors sets. Add it to the cache.
486 # -> the node is its own successors sets. Add it to the cache.
482 # 3) We do not know successors set of direct successors of CURRENT:
487 # 3) We do not know successors set of direct successors of CURRENT:
483 # -> We add those successors to the stack.
488 # -> We add those successors to the stack.
484 # 4) We know successors sets of all direct successors of CURRENT:
489 # 4) We know successors sets of all direct successors of CURRENT:
485 # -> We can compute CURRENT successors set and add it to the
490 # -> We can compute CURRENT successors set and add it to the
486 # cache.
491 # cache.
487 #
492 #
488 current = toproceed[-1]
493 current = toproceed[-1]
489
494
490 # case 2 condition is a bit hairy because of closest,
495 # case 2 condition is a bit hairy because of closest,
491 # we compute it on its own
496 # we compute it on its own
492 case2condition = ((current not in succmarkers)
497 case2condition = ((current not in succmarkers)
493 or (closest and current != initialnode
498 or (closest and current != initialnode
494 and current in repo))
499 and current in repo))
495
500
496 if current in cache:
501 if current in cache:
497 # case (1): We already know the successors sets
502 # case (1): We already know the successors sets
498 stackedset.remove(toproceed.pop())
503 stackedset.remove(toproceed.pop())
499 elif case2condition:
504 elif case2condition:
500 # case (2): end of walk.
505 # case (2): end of walk.
501 if current in repo:
506 if current in repo:
502 # We have a valid successor.
507 # We have a valid successor.
503 cache[current] = [_succs((current,))]
508 cache[current] = [_succs((current,))]
504 else:
509 else:
505 # Final obsolete version is unknown locally.
510 # Final obsolete version is unknown locally.
506 # Do not count that as a valid successor
511 # Do not count that as a valid successor
507 cache[current] = []
512 cache[current] = []
508 else:
513 else:
509 # cases (3) and (4)
514 # cases (3) and (4)
510 #
515 #
511 # We proceed in two phases. Phase 1 aims to distinguish case (3)
516 # We proceed in two phases. Phase 1 aims to distinguish case (3)
512 # from case (4):
517 # from case (4):
513 #
518 #
514 # For each direct successors of CURRENT, we check whether its
519 # For each direct successors of CURRENT, we check whether its
515 # successors sets are known. If they are not, we stack the
520 # successors sets are known. If they are not, we stack the
516 # unknown node and proceed to the next iteration of the while
521 # unknown node and proceed to the next iteration of the while
517 # loop. (case 3)
522 # loop. (case 3)
518 #
523 #
519 # During this step, we may detect obsolescence cycles: a node
524 # During this step, we may detect obsolescence cycles: a node
520 # with unknown successors sets but already in the call stack.
525 # with unknown successors sets but already in the call stack.
521 # In such a situation, we arbitrarily set the successors sets of
526 # In such a situation, we arbitrarily set the successors sets of
522 # the node to nothing (node pruned) to break the cycle.
527 # the node to nothing (node pruned) to break the cycle.
523 #
528 #
524 # If no break was encountered we proceed to phase 2.
529 # If no break was encountered we proceed to phase 2.
525 #
530 #
526 # Phase 2 computes successors sets of CURRENT (case 4); see details
531 # Phase 2 computes successors sets of CURRENT (case 4); see details
527 # in phase 2 itself.
532 # in phase 2 itself.
528 #
533 #
529 # Note the two levels of iteration in each phase.
534 # Note the two levels of iteration in each phase.
530 # - The first one handles obsolescence markers using CURRENT as
535 # - The first one handles obsolescence markers using CURRENT as
531 # precursor (successors markers of CURRENT).
536 # precursor (successors markers of CURRENT).
532 #
537 #
533 # Having multiple entries here means divergence.
538 # Having multiple entries here means divergence.
534 #
539 #
535 # - The second one handles successors defined in each marker.
540 # - The second one handles successors defined in each marker.
536 #
541 #
537 # Having none means a pruned node, multiple successors mean a split,
542 # Having none means a pruned node, multiple successors mean a split,
538 # and a single successor is a standard replacement.
543 # and a single successor is a standard replacement.
539 #
544 #
540 for mark in sorted(succmarkers[current]):
545 for mark in sorted(succmarkers[current]):
541 for suc in mark[1]:
546 for suc in mark[1]:
542 if suc not in cache:
547 if suc not in cache:
543 if suc in stackedset:
548 if suc in stackedset:
544 # cycle breaking
549 # cycle breaking
545 cache[suc] = []
550 cache[suc] = []
546 else:
551 else:
547 # case (3) If we have not computed successors sets
552 # case (3) If we have not computed successors sets
548 # of one of those successors we add it to the
553 # of one of those successors we add it to the
549 # `toproceed` stack and stop all work for this
554 # `toproceed` stack and stop all work for this
550 # iteration.
555 # iteration.
551 toproceed.append(suc)
556 toproceed.append(suc)
552 stackedset.add(suc)
557 stackedset.add(suc)
553 break
558 break
554 else:
559 else:
555 continue
560 continue
556 break
561 break
557 else:
562 else:
558 # case (4): we know all successors sets of all direct
563 # case (4): we know all successors sets of all direct
559 # successors
564 # successors
560 #
565 #
561 # Successors set contributed by each marker depends on the
566 # Successors set contributed by each marker depends on the
562 # successors sets of all its "successors" node.
567 # successors sets of all its "successors" node.
563 #
568 #
564 # Each different marker is a divergence in the obsolescence
569 # Each different marker is a divergence in the obsolescence
565 # history. It contributes successors sets distinct from other
570 # history. It contributes successors sets distinct from other
566 # markers.
571 # markers.
567 #
572 #
568 # Within a marker, a successor may have divergent successors
573 # Within a marker, a successor may have divergent successors
569 # sets. In such a case, the marker will contribute multiple
574 # sets. In such a case, the marker will contribute multiple
570 # divergent successors sets. If multiple successors have
575 # divergent successors sets. If multiple successors have
571 # divergent successors sets, a Cartesian product is used.
576 # divergent successors sets, a Cartesian product is used.
572 #
577 #
573 # At the end we post-process successors sets to remove
578 # At the end we post-process successors sets to remove
574 # duplicated entry and successors set that are strict subset of
579 # duplicated entry and successors set that are strict subset of
575 # another one.
580 # another one.
576 succssets = []
581 succssets = []
577 for mark in sorted(succmarkers[current]):
582 for mark in sorted(succmarkers[current]):
578 # successors sets contributed by this marker
583 # successors sets contributed by this marker
579 base = _succs()
584 base = _succs()
580 base.markers.add(mark)
585 base.markers.add(mark)
581 markss = [base]
586 markss = [base]
582 for suc in mark[1]:
587 for suc in mark[1]:
583 # cardinal product with previous successors
588 # cardinal product with previous successors
584 productresult = []
589 productresult = []
585 for prefix in markss:
590 for prefix in markss:
586 for suffix in cache[suc]:
591 for suffix in cache[suc]:
587 newss = prefix.copy()
592 newss = prefix.copy()
588 newss.markers.update(suffix.markers)
593 newss.markers.update(suffix.markers)
589 for part in suffix:
594 for part in suffix:
590 # do not duplicate entries in the successors set:
595 # do not duplicate entries in the successors set:
591 # first entry wins.
596 # first entry wins.
592 if part not in newss:
597 if part not in newss:
593 newss.append(part)
598 newss.append(part)
594 productresult.append(newss)
599 productresult.append(newss)
595 markss = productresult
600 markss = productresult
596 succssets.extend(markss)
601 succssets.extend(markss)
597 # remove duplicated and subset
602 # remove duplicated and subset
598 seen = []
603 seen = []
599 final = []
604 final = []
600 candidates = sorted((s for s in succssets if s),
605 candidates = sorted((s for s in succssets if s),
601 key=len, reverse=True)
606 key=len, reverse=True)
602 for cand in candidates:
607 for cand in candidates:
603 for seensuccs in seen:
608 for seensuccs in seen:
604 if cand.canmerge(seensuccs):
609 if cand.canmerge(seensuccs):
605 seensuccs.markers.update(cand.markers)
610 seensuccs.markers.update(cand.markers)
606 break
611 break
607 else:
612 else:
608 final.append(cand)
613 final.append(cand)
609 seen.append(cand)
614 seen.append(cand)
610 final.reverse() # put small successors set first
615 final.reverse() # put small successors set first
611 cache[current] = final
616 cache[current] = final
612 return cache[initialnode]
617 return cache[initialnode]
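Editorial note: the Cartesian-product step of phase 2 is easier to follow on plain data. Below is a self-contained toy model of how a single split marker combines the cached successors sets of its successors; ordinary tuples stand in for _succs objects and the node names are invented:

    # Toy model of the phase-2 combination, independent of Mercurial objects.
    # A marker rewrites X into B and C (a split); the successors sets of B and
    # C are already known from the cache, and C happens to be divergent.
    cache = {
        'B': [('B1',)],             # B was rewritten into B1
        'C': [('C1',), ('C2',)],    # C has two competing successors sets
    }

    markss = [()]                   # successors sets contributed by this marker
    for suc in ('B', 'C'):
        productresult = []
        for prefix in markss:
            for suffix in cache[suc]:
                # append suffix entries, skipping duplicates (first entry wins)
                newss = prefix + tuple(p for p in suffix if p not in prefix)
                productresult.append(newss)
        markss = productresult

    # The split marker contributes one successors set per divergent branch of C.
    assert markss == [('B1', 'C1'), ('B1', 'C2')]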
613
618
614 def successorsandmarkers(repo, ctx):
619 def successorsandmarkers(repo, ctx):
615 """compute the raw data needed for computing obsfate
620 """compute the raw data needed for computing obsfate
616 Returns a list of dicts, one dict per successors set
621 Returns a list of dicts, one dict per successors set
617 """
622 """
618 if not ctx.obsolete():
623 if not ctx.obsolete():
619 return None
624 return None
620
625
621 ssets = successorssets(repo, ctx.node(), closest=True)
626 ssets = successorssets(repo, ctx.node(), closest=True)
622
627
623 # closestsuccessors returns an empty list for pruned revisions, remap it
628 # closestsuccessors returns an empty list for pruned revisions, remap it
624 # into a list containing an empty list for future processing
629 # into a list containing an empty list for future processing
625 if ssets == []:
630 if ssets == []:
626 ssets = [[]]
631 ssets = [[]]
627
632
628 # Try to recover pruned markers
633 # Try to recover pruned markers
629 succsmap = repo.obsstore.successors
634 succsmap = repo.obsstore.successors
630 fullsuccessorsets = [] # successor set + markers
635 fullsuccessorsets = [] # successor set + markers
631 for sset in ssets:
636 for sset in ssets:
632 if sset:
637 if sset:
633 fullsuccessorsets.append(sset)
638 fullsuccessorsets.append(sset)
634 else:
639 else:
635 # successorssets returns an empty set() when ctx or one of its
640 # successorssets returns an empty set() when ctx or one of its
636 # successors is pruned.
641 # successors is pruned.
637 # In this case, walk the obs-markers tree again starting with ctx
642 # In this case, walk the obs-markers tree again starting with ctx
638 # and find the relevant pruning obs-markers, the ones without
643 # and find the relevant pruning obs-markers, the ones without
639 # successors.
644 # successors.
640 # Having these markers allows us to compute some information about
645 # Having these markers allows us to compute some information about
641 # its fate, like who pruned this changeset and when.
646 # its fate, like who pruned this changeset and when.
642
647
643 # XXX we do not catch all prune markers (eg rewritten then pruned)
648 # XXX we do not catch all prune markers (eg rewritten then pruned)
644 # (fix me later)
649 # (fix me later)
645 foundany = False
650 foundany = False
646 for mark in succsmap.get(ctx.node(), ()):
651 for mark in succsmap.get(ctx.node(), ()):
647 if not mark[1]:
652 if not mark[1]:
648 foundany = True
653 foundany = True
649 sset = _succs()
654 sset = _succs()
650 sset.markers.add(mark)
655 sset.markers.add(mark)
651 fullsuccessorsets.append(sset)
656 fullsuccessorsets.append(sset)
652 if not foundany:
657 if not foundany:
653 fullsuccessorsets.append(_succs())
658 fullsuccessorsets.append(_succs())
654
659
655 values = []
660 values = []
656 for sset in fullsuccessorsets:
661 for sset in fullsuccessorsets:
657 values.append({'successors': sset, 'markers': sset.markers})
662 values.append({'successors': sset, 'markers': sset.markers})
658
663
659 return values
664 return values
660
665
661 def successorsetverb(successorset):
666 def successorsetverb(successorset):
662 """ Return the verb summarizing the successorset
667 """ Return the verb summarizing the successorset
663 """
668 """
664 if not successorset:
669 if not successorset:
665 verb = 'pruned'
670 verb = 'pruned'
666 elif len(successorset) == 1:
671 elif len(successorset) == 1:
667 verb = 'rewritten'
672 verb = 'rewritten'
668 else:
673 else:
669 verb = 'split'
674 verb = 'split'
670 return verb
675 return verb
671
676
672 def markersdates(markers):
677 def markersdates(markers):
673 """returns the list of dates for a list of markers
678 """returns the list of dates for a list of markers
674 """
679 """
675 return [m[4] for m in markers]
680 return [m[4] for m in markers]
676
681
677 def markersusers(markers):
682 def markersusers(markers):
678 """ Returns a sorted list of markers users without duplicates
683 """ Returns a sorted list of markers users without duplicates
679 """
684 """
680 markersmeta = [dict(m[3]) for m in markers]
685 markersmeta = [dict(m[3]) for m in markers]
681 users = set(meta.get('user') for meta in markersmeta if meta.get('user'))
686 users = set(meta.get('user') for meta in markersmeta if meta.get('user'))
682
687
683 return sorted(users)
688 return sorted(users)
684
689
685 def markersoperations(markers):
690 def markersoperations(markers):
686 """ Returns a sorted list of markers operations without duplicates
691 """ Returns a sorted list of markers operations without duplicates
687 """
692 """
688 markersmeta = [dict(m[3]) for m in markers]
693 markersmeta = [dict(m[3]) for m in markers]
689 operations = set(meta.get('operation') for meta in markersmeta
694 operations = set(meta.get('operation') for meta in markersmeta
690 if meta.get('operation'))
695 if meta.get('operation'))
691
696
692 return sorted(operations)
697 return sorted(operations)
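Editorial note: taken together, the helpers at the end of the module provide the raw material for an "obsfate"-style summary: successorsandmarkers pairs each successors set with its markers, successorsetverb names the kind of rewrite, and the markers* helpers aggregate the metadata. A rough sketch of such a summary, assuming an obsolete changeset context ctx and an in-process repo supplied by the caller; the output format is invented for illustration:

    # Sketch only: `repo` and `ctx` are supplied by the calling code.
    from mercurial.node import short

    values = successorsandmarkers(repo, ctx)
    if values is not None:                 # None means ctx is not obsolete
        for value in values:
            sset = value['successors']
            markers = value['markers']
            verb = successorsetverb(sset)  # 'pruned', 'rewritten' or 'split'
            succs = ', '.join(short(n) for n in sset) or '-'
            users = ', '.join(markersusers(markers)) or 'unknown user'
            ops = ', '.join(markersoperations(markers)) or 'unknown operation'
            print('%s as %s (%s by %s)' % (verb, succs, ops, users))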
@@ -1,167 +1,167 b''
1 Test the 'effect-flags' feature
1 Test the 'effect-flags' feature
2
2
3 Global setup
3 Global setup
4 ============
4 ============
5
5
6 $ . $TESTDIR/testlib/obsmarker-common.sh
6 $ . $TESTDIR/testlib/obsmarker-common.sh
7 $ cat >> $HGRCPATH <<EOF
7 $ cat >> $HGRCPATH <<EOF
8 > [ui]
8 > [ui]
9 > interactive = true
9 > interactive = true
10 > [phases]
10 > [phases]
11 > publish=False
11 > publish=False
12 > [extensions]
12 > [extensions]
13 > rebase =
13 > rebase =
14 > [experimental]
14 > [experimental]
15 > evolution = all
15 > evolution = all
16 > effect-flags = 1
16 > effect-flags = 1
17 > EOF
17 > EOF
18
18
19 $ hg init $TESTTMP/effect-flags
19 $ hg init $TESTTMP/effect-flags
20 $ cd $TESTTMP/effect-flags
20 $ cd $TESTTMP/effect-flags
21 $ mkcommit ROOT
21 $ mkcommit ROOT
22
22
23 amend touching the description only
23 amend touching the description only
24 -----------------------------------
24 -----------------------------------
25
25
26 $ mkcommit A0
26 $ mkcommit A0
27 $ hg commit --amend -m "A1"
27 $ hg commit --amend -m "A1"
28
28
29 check result
29 check result
30
30
31 $ hg debugobsolete --rev .
31 $ hg debugobsolete --rev .
32 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
32 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
33
33
34 amend touching the user only
34 amend touching the user only
35 ----------------------------
35 ----------------------------
36
36
37 $ mkcommit B0
37 $ mkcommit B0
38 $ hg commit --amend -u "bob <bob@bob.com>"
38 $ hg commit --amend -u "bob <bob@bob.com>"
39
39
40 check result
40 check result
41
41
42 $ hg debugobsolete --rev .
42 $ hg debugobsolete --rev .
43 ef4a313b1e0ade55718395d80e6b88c5ccd875eb 5485c92d34330dac9d7a63dc07e1e3373835b964 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '16', 'operation': 'amend', 'user': 'test'}
43 ef4a313b1e0ade55718395d80e6b88c5ccd875eb 5485c92d34330dac9d7a63dc07e1e3373835b964 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '16', 'operation': 'amend', 'user': 'test'}
44
44
45 amend touching the date only
45 amend touching the date only
46 ----------------------------
46 ----------------------------
47
47
48 $ mkcommit B1
48 $ mkcommit B1
49 $ hg commit --amend -d "42 0"
49 $ hg commit --amend -d "42 0"
50
50
51 check result
51 check result
52
52
53 $ hg debugobsolete --rev .
53 $ hg debugobsolete --rev .
54 2ef0680ff45038ac28c9f1ff3644341f54487280 4dd84345082e9e5291c2e6b3f335bbf8bf389378 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '32', 'operation': 'amend', 'user': 'test'}
54 2ef0680ff45038ac28c9f1ff3644341f54487280 4dd84345082e9e5291c2e6b3f335bbf8bf389378 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '32', 'operation': 'amend', 'user': 'test'}
55
55
56 amend touching the branch only
56 amend touching the branch only
57 ------------------------------
57 ------------------------------
58
58
59 $ mkcommit B2
59 $ mkcommit B2
60 $ hg branch my-branch
60 $ hg branch my-branch
61 marked working directory as branch my-branch
61 marked working directory as branch my-branch
62 (branches are permanent and global, did you want a bookmark?)
62 (branches are permanent and global, did you want a bookmark?)
63 $ hg commit --amend
63 $ hg commit --amend
64
64
65 check result
65 check result
66
66
67 $ hg debugobsolete --rev .
67 $ hg debugobsolete --rev .
68 bd3db8264ceebf1966319f5df3be7aac6acd1a8e 14a01456e0574f0e0a0b15b2345486a6364a8d79 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '64', 'operation': 'amend', 'user': 'test'}
68 bd3db8264ceebf1966319f5df3be7aac6acd1a8e 14a01456e0574f0e0a0b15b2345486a6364a8d79 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '64', 'operation': 'amend', 'user': 'test'}
69
69
70 $ hg up default
70 $ hg up default
71 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
71 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
72
72
73 rebase (parents change)
73 rebase (parents change)
74 -----------------------
74 -----------------------
75
75
76 $ mkcommit C0
76 $ mkcommit C0
77 $ mkcommit D0
77 $ mkcommit D0
78 $ hg rebase -r . -d 'desc(B0)'
78 $ hg rebase -r . -d 'desc(B0)'
79 rebasing 10:c85eff83a034 "D0" (tip)
79 rebasing 10:c85eff83a034 "D0" (tip)
80
80
81 check result
81 check result
82
82
83 $ hg debugobsolete --rev .
83 $ hg debugobsolete --rev .
84 c85eff83a0340efd9da52b806a94c350222f3371 da86aa2f19a30d6686b15cae15c7b6c908ec9699 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
84 c85eff83a0340efd9da52b806a94c350222f3371 da86aa2f19a30d6686b15cae15c7b6c908ec9699 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
85
85
86 amend touching the diff
86 amend touching the diff
87 -----------------------
87 -----------------------
88
88
89 $ mkcommit E0
89 $ mkcommit E0
90 $ echo 42 >> E0
90 $ echo 42 >> E0
91 $ hg commit --amend
91 $ hg commit --amend
92
92
93 check result
93 check result
94
94
95 $ hg debugobsolete --rev .
95 $ hg debugobsolete --rev .
96 ebfe0333e0d96f68a917afd97c0a0af87f1c3b5f 75781fdbdbf58a987516b00c980bccda1e9ae588 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'amend', 'user': 'test'}
96 ebfe0333e0d96f68a917afd97c0a0af87f1c3b5f 75781fdbdbf58a987516b00c980bccda1e9ae588 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'amend', 'user': 'test'}
97
97
98 amend with multiple effect (desc and meta)
98 amend with multiple effect (desc and meta)
99 -------------------------------------------
99 -------------------------------------------
100
100
101 $ mkcommit F0
101 $ mkcommit F0
102 $ hg branch my-other-branch
102 $ hg branch my-other-branch
103 marked working directory as branch my-other-branch
103 marked working directory as branch my-other-branch
104 $ hg commit --amend -m F1 -u "bob <bob@bob.com>" -d "42 0"
104 $ hg commit --amend -m F1 -u "bob <bob@bob.com>" -d "42 0"
105
105
106 check result
106 check result
107
107
108 $ hg debugobsolete --rev .
108 $ hg debugobsolete --rev .
109 fad47e5bd78e6aa4db1b5a0a1751bc12563655ff a94e0fd5f1c81d969381a76eb0d37ce499a44fae 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '113', 'operation': 'amend', 'user': 'test'}
109 fad47e5bd78e6aa4db1b5a0a1751bc12563655ff a94e0fd5f1c81d969381a76eb0d37ce499a44fae 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '113', 'operation': 'amend', 'user': 'test'}
110
110
111 rebase not touching the diff
111 rebase not touching the diff
112 ----------------------------
112 ----------------------------
113
113
114 $ cat << EOF > H0
114 $ cat << EOF > H0
115 > 0
115 > 0
116 > 1
116 > 1
117 > 2
117 > 2
118 > 3
118 > 3
119 > 4
119 > 4
120 > 5
120 > 5
121 > 6
121 > 6
122 > 7
122 > 7
123 > 8
123 > 8
124 > 9
124 > 9
125 > 10
125 > 10
126 > EOF
126 > EOF
127 $ hg add H0
127 $ hg add H0
128 $ hg commit -m 'H0'
128 $ hg commit -m 'H0'
129 $ echo "H1" >> H0
129 $ echo "H1" >> H0
130 $ hg commit -m "H1"
130 $ hg commit -m "H1"
131 $ hg up -r "desc(H0)"
131 $ hg up -r "desc(H0)"
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 $ cat << EOF > H0
133 $ cat << EOF > H0
134 > H2
134 > H2
135 > 0
135 > 0
136 > 1
136 > 1
137 > 2
137 > 2
138 > 3
138 > 3
139 > 4
139 > 4
140 > 5
140 > 5
141 > 6
141 > 6
142 > 7
142 > 7
143 > 8
143 > 8
144 > 9
144 > 9
145 > 10
145 > 10
146 > EOF
146 > EOF
147 $ hg commit -m "H2"
147 $ hg commit -m "H2"
148 created new head
148 created new head
149 $ hg rebase -s "desc(H1)" -d "desc(H2)" -t :merge3
149 $ hg rebase -s "desc(H1)" -d "desc(H2)" -t :merge3
150 rebasing 17:b57fed8d8322 "H1"
150 rebasing 17:b57fed8d8322 "H1"
151 merging H0
151 merging H0
152 $ hg debugobsolete -r tip
152 $ hg debugobsolete -r tip
153 b57fed8d83228a8ae3748d8c3760a77638dd4f8c e509e2eb3df5d131ff7c02350bf2a9edd0c09478 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
153 b57fed8d83228a8ae3748d8c3760a77638dd4f8c e509e2eb3df5d131ff7c02350bf2a9edd0c09478 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '4', 'operation': 'rebase', 'user': 'test'}
154
154
155 amend closing the branch should be detected as meta change
155 amend closing the branch should be detected as meta change
156 ----------------------------------------------------------
156 ----------------------------------------------------------
157
157
158 $ hg branch closedbranch
158 $ hg branch closedbranch
159 marked working directory as branch closedbranch
159 marked working directory as branch closedbranch
160 $ mkcommit G0
160 $ mkcommit G0
161 $ mkcommit I0
161 $ mkcommit I0
162 $ hg commit --amend --close-branch
162 $ hg commit --amend --close-branch
163
163
164 check result
164 check result
165
165
166 $ hg debugobsolete -r .
166 $ hg debugobsolete -r .
167 2f599e54c1c6974299065cdf54e1ad640bfb7b5d 12c6238b5e371eea00fd2013b12edce3f070928b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'amend', 'user': 'test'}
167 2f599e54c1c6974299065cdf54e1ad640bfb7b5d 12c6238b5e371eea00fd2013b12edce3f070928b 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'amend', 'user': 'test'}
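Editorial note: as a quick cross-check of the metadata recorded above, the single-field amends store 1 (description), 16 (user), 32 (date) and 64 (branch); the rebases now store 4, the parents bit introduced by this changeset; the combined amend stores 113 = 1 + 16 + 32 + 64; and the amends that only touch the diff or close the branch still store 0, since none of the tracked fields changed at this point in the series. The arithmetic in one line:

    # 113 combines the description, user, date and branch bits; 4 is parents only.
    assert 1 + 16 + 32 + 64 == 113 and (1 << 2) == 4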