checkheads: upgrade the obsolescence postprocessing logic (issue4354)...
Pierre-Yves David
r32009:c6cb21dd default
@@ -0,0 +1,72 @@
====================================
Testing head checking code: Case B-1
====================================

Mercurial checks for the introduction of new heads on push. Evolution comes
into play to detect if existing branches on the server are being replaced by
some of the new ones we push.

This case is part of a series of tests checking this behavior.

Category B: simple case involving pruned changesets
TestCase 1: single pruned changeset

.. old-state:
..
.. * 1 changeset branch
..
.. new-state:
..
.. * old branch is pruned
.. * 1 new unrelated branch
..
.. expected-result:
..
.. * push allowed
..
.. graph-summary:
..
.. β—” B
.. |
.. A βŠ— |
.. |/
.. ●

  $ . $TESTDIR/testlib/push-checkheads-util.sh

Test setup
----------

  $ mkdir B1
  $ cd B1
  $ setuprepos
  creating basic server and client repo
  updating to branch default
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd client
  $ hg up 0
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ mkcommit B0
  created new head
  $ hg debugobsolete --record-parents `getid "desc(A0)"`
  $ hg log -G --hidden
  @ 74ff5441d343 (draft): B0
  |
  | x 8aaa48160adc (draft): A0
  |/
  o 1e4be0697311 (public): root


Actual testing
--------------

  $ hg push
  pushing to $TESTTMP/B1/server (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files (+1 heads)
  1 new obsolescence markers

  $ cd ../..
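The push above is accepted even though the server's only head, A0, has no successor: the prune marker recorded for A0 still carries A0's parents, which is what the updated checkheads logic later walks through. A rough sketch of that marker's shape (toy Python values, not actual obsstore output), using the field order documented in discovery.py below:

```python
# Illustrative only: a toy rendering of the marker that
# `hg debugobsolete --record-parents` creates for A0 above.  Field order is
# ('prec', 'succs', 'flag', 'meta', 'date', 'parents').  Hashes are the short
# forms from the log output; real markers store full binary nodes.
prune_marker = (
    '8aaa48160adc',      # prec: the pruned changeset A0
    (),                  # succs: empty -- a prune marker, A0 has no successor
    0,                   # flag
    (),                  # meta
    (0.0, 0),            # date (toy value)
    ('1e4be0697311',),   # parents of A0, recorded so the marker stays traversable
)

# Empty successors identify the prune; the recorded parents keep the chain walkable.
assert not prune_marker[1] and prune_marker[5] == ('1e4be0697311',)
```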
@@ -1,450 +1,519 @@
# discovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

+import functools
+
from .i18n import _
from .node import (
+    hex,
    nullid,
    short,
)

from . import (
    bookmarks,
    branchmap,
    error,
-    obsolete,
    phases,
    setdiscovery,
    treediscovery,
    util,
)

def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        allknown = True
        knownnode = repo.changelog.hasnode # no nodemap until it is filtered
        for h in heads:
            if not knownnode(h):
                allknown = False
                break
        if allknown:
            return (heads, False, heads)

    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                       abortwhenunrelated=not force)
    common, anyinc, srvheads = res
    return (list(common), anyinc, heads or list(srvheads))

class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changeset that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, repo, commonheads=None, missingheads=None,
                 missingroots=None):
        # at least one of them must not be set
        assert None in (commonheads, missingroots)
        cl = repo.changelog
        if missingheads is None:
            missingheads = cl.heads()
        if missingroots:
            discbases = []
            for n in missingroots:
                discbases.extend([p for p in cl.parents(n) if p != nullid])
            # TODO remove call to nodesbetween.
            # TODO populate attributes on outgoing instance instead of setting
            # discbases.
            csets, roots, heads = cl.nodesbetween(missingroots, missingheads)
            included = set(csets)
            missingheads = heads
            commonheads = [n for n in discbases if n not in included]
        elif not commonheads:
            commonheads = [nullid]
        self.commonheads = commonheads
        self.missingheads = missingheads
        self._revlog = cl
        self._common = None
        self._missing = None
        self.excluded = []

    def _computecommonmissing(self):
        sets = self._revlog.findcommonmissing(self.commonheads,
                                              self.missingheads)
        self._common, self._missing = sets

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing

def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    og.commonheads, _any, _hds = commoninc

    # compute outgoing
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = repo.filtered("served").heads()
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads
    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og

def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. Create set of branches involved in the push.
    branches = set(repo[n].branch() for n in outgoing.missing)
    remotemap = remote.branchmap()
    newbranches = branches - set(remotemap)
    branches.difference_update(newbranches)

    # A. register remote heads
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        knownnode = cl.hasnode # do not use nodemap until it is filtered
        for h in heads:
            if knownnode(h):
                known.append(h)
            else:
                unsynced.append(h)
        headssum[branch] = (known, list(known), unsynced)
    # B. add new branch data
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            headssum[branch] = (None, [], [])

    # C drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # D. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = branchmap.branchcache((branch, heads[1])
                                   for branch, heads in headssum.iteritems()
                                   if heads[0] is not None)
    newmap.update(repo, (ctx.rev() for ctx in missingctx))
    for branch, newheads in newmap.iteritems():
        headssum[branch][1][:] = newheads
    return headssum

def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
    """Compute branchmapsummary for repo without branchmap support"""

    # 1-4b. old servers: Check for new topological heads.
    # Construct {old,new}map with branch = None (topological branch).
    # (code based on update)
    knownnode = repo.changelog.hasnode # no nodemap until it is filtered
    oldheads = set(h for h in remoteheads if knownnode(h))
    # all nodes in outgoing.missing are children of either:
    # - an element of oldheads
    # - another element of outgoing.missing
    # - nullrev
    # This explains why the new head are very simple to compute.
    r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
    newheads = list(c.node() for c in r)
    # set some unsynced head to issue the "unsynced changes" warning
    if inc:
        unsynced = set([None])
    else:
        unsynced = set()
    return {None: (oldheads, newheads, unsynced)}

def _nowarnheads(pushop):
    # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    localbookmarks = repo._bookmarks
    remotebookmarks = remote.listkeys('bookmarks')
    bookmarkedheads = set()

    # internal config: bookmarks.pushing
    newbookmarks = [localbookmarks.expandname(b)
                    for b in pushop.ui.configlist('bookmarks', 'pushing')]

    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = repo[bm], repo[rnode]
            if bookmarks.validdest(repo, rctx, lctx):
                bookmarkedheads.add(lctx.node())
        else:
            if bm in newbookmarks and bm not in remotebookmarks:
                bookmarkedheads.add(repo[bm].node())

    return bookmarkedheads

def checkheads(pushop):
    """Check that a push won't add any outgoing head

    raise Abort error and display ui message as needed.
    """

    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    outgoing = pushop.outgoing
    remoteheads = pushop.remoteheads
    newbranch = pushop.newbranch
    inc = bool(pushop.incoming)

    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise error.Abort(_("push creates new remote branches: %s!")
                          % branchnames,
                          hint=_("use 'hg push --new-branch' to create"
                                 " new remote branches"))

    # 2. Find heads that we need not warn about
    nowarnheads = _nowarnheads(pushop)

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    errormsg = None
    # If there is no obsstore, allfuturecommon won't be used, so no
    # need to compute it.
    if repo.obsstore:
        allmissing = set(outgoing.missing)
        cctx = repo.set('%ld', outgoing.common)
        allfuturecommon = set(c.node() for c in cctx)
        allfuturecommon.update(allmissing)
    for branch, heads in sorted(headssum.iteritems()):
        remoteheads, newheads, unsyncedheads = heads
        candidate_newhs = set(newheads)
        # add unsynced data
        if remoteheads is None:
            oldhs = set()
        else:
            oldhs = set(remoteheads)
        oldhs.update(unsyncedheads)
        candidate_newhs.update(unsyncedheads)
        dhs = None # delta heads, the new heads on branch
        if not repo.obsstore:
            discardedheads = set()
            newhs = candidate_newhs
        else:
            newhs, discardedheads = _postprocessobsolete(pushop,
                                                         allfuturecommon,
                                                         candidate_newhs)
        unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
        if unsynced:
            if None in unsynced:
                # old remote, no heads data
                heads = None
            elif len(unsynced) <= 4 or repo.ui.verbose:
                heads = ' '.join(short(h) for h in unsynced)
            else:
                heads = (' '.join(short(h) for h in unsynced[:4]) +
                         ' ' + _("and %s others") % (len(unsynced) - 4))
            if heads is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally\n"))
            elif branch is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally: %s\n") % heads)
            else:
                repo.ui.status(_("remote has heads on branch '%s' that are "
                                 "not known locally: %s\n") % (branch, heads))
        if remoteheads is None:
            if len(newhs) > 1:
                dhs = list(newhs)
                if errormsg is None:
                    errormsg = (_("push creates new branch '%s' "
                                  "with multiple heads") % (branch))
                    hint = _("merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
        elif len(newhs) > len(oldhs):
            # remove bookmarked or existing remote heads from the new heads list
            dhs = sorted(newhs - nowarnheads - oldhs)
        if dhs:
            if errormsg is None:
                if branch not in ('default', None):
                    errormsg = _("push creates new remote head %s "
                                 "on branch '%s'!") % (short(dhs[0]), branch)
                elif repo[dhs[0]].bookmarks():
                    errormsg = _("push creates new remote head %s "
                                 "with bookmark '%s'!") % (
                                 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                else:
                    errormsg = _("push creates new remote head %s!"
                                 ) % short(dhs[0])
                if unsyncedheads:
                    hint = _("pull and merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
                else:
                    hint = _("merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
            if branch is None:
                repo.ui.note(_("new remote heads:\n"))
            else:
                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
            for h in dhs:
                repo.ui.note((" %s\n") % short(h))
    if errormsg:
        raise error.Abort(errormsg, hint=hint)

def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
    """post process the list of new heads with obsolescence information

-    Exists as a subfunction to contain the complexity and allow extensions to
+    Exists as a sub-function to contain the complexity and allow extensions to
    experiment with smarter logic.
+
    Returns (newheads, discarded_heads) tuple
    """
-    # remove future heads which are actually obsoleted by another
-    # pushed element:
-    #
-    # XXX as above, There are several cases this code does not handle
-    # XXX properly
-    #
-    # (1) if <nh> is public, it won't be affected by obsolete marker
-    #     and a new is created
-    #
-    # (2) if the new heads have ancestors which are not obsolete and
-    #     not ancestors of any other heads we will have a new head too.
-    #
-    # These two cases will be easy to handle for known changeset but
-    # much more tricky for unsynced changes.
-    #
-    # In addition, this code is confused by prune as it only looks for
-    # successors of the heads (none if pruned) leading to issue4354
+    # known issue
+    #
+    # * We "silently" skip processing on all changeset unknown locally
+    #
+    # * if <nh> is public on the remote, it won't be affected by obsolete
+    #   marker and a new is created
+
+    # define various utilities and containers
    repo = pushop.repo
-    newhs = set()
-    discarded = set()
-    for nh in candidate_newhs:
-        if nh in repo and repo[nh].phase() <= phases.public:
-            newhs.add(nh)
-        else:
-            for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
-                if suc != nh and suc in futurecommon:
-                    discarded.add(nh)
-                    break
-            else:
-                newhs.add(nh)
+    unfi = repo.unfiltered()
+    tonode = unfi.changelog.node
+    public = phases.public
+    getphase = unfi._phasecache.phase
+    ispublic = (lambda r: getphase(unfi, r) == public)
+    hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore,
+                                     futurecommon)
+    successorsmarkers = unfi.obsstore.successors
+    newhs = set() # final set of new heads
+    discarded = set() # new head of fully replaced branch
+
+    localcandidate = set() # candidate heads known locally
+    unknownheads = set() # candidate heads unknown locally
+    for h in candidate_newhs:
+        if h in unfi:
+            localcandidate.add(h)
+        else:
+            if successorsmarkers.get(h) is not None:
+                msg = ('checkheads: remote head unknown locally has'
+                       ' local marker: %s\n')
+                repo.ui.debug(msg % hex(h))
+            unknownheads.add(h)
+
+    # fast path the simple case
+    if len(localcandidate) == 1:
+        return unknownheads | set(candidate_newhs), set()
+
+    # actually process branch replacement
+    while localcandidate:
+        nh = localcandidate.pop()
+        # run this check early to skip the evaluation of the whole branch
+        if (nh in futurecommon
+                or unfi[nh].phase() <= public):
+            newhs.add(nh)
+            continue
+
+        # Get all revs/nodes on the branch exclusive to this head
+        # (already filtered heads are "ignored"))
+        branchrevs = unfi.revs('only(%n, (%ln+%ln))',
+                               nh, localcandidate, newhs)
+        branchnodes = [tonode(r) for r in branchrevs]
+
+        # The branch won't be hidden on the remote if
+        # * any part of it is public,
+        # * any part of it is considered part of the result by previous logic,
+        # * if we have no markers to push to obsolete it.
+        if (any(ispublic(r) for r in branchrevs)
+                or any(n in futurecommon for n in branchnodes)
+                or any(not hasoutmarker(n) for n in branchnodes)):
+            newhs.add(nh)
+        else:
+            # note: there is a corner case if there is a merge in the branch.
+            # we might end up with -more- heads. However, these heads are not
+            # "added" by the push, but more by the "removal" on the remote so I
+            # think is a okay to ignore them,
+            discarded.add(nh)
+    newhs |= unknownheads
    return newhs, discarded
+
+def pushingmarkerfor(obsstore, pushset, node):
+    """true if some markers are to be pushed for node
+
+    We cannot just look in to the pushed obsmarkers from the pushop because
+    discovery might have filtered relevant markers. In addition listing all
+    markers relevant to all changesets in the pushed set would be too expensive
+    (O(len(repo)))
+
+    (note: There are cache opportunity in this function. but it would requires
+    a two dimensional stack.)
+    """
+    successorsmarkers = obsstore.successors
+    stack = [node]
+    seen = set(stack)
+    while stack:
+        current = stack.pop()
+        if current in pushset:
+            return True
+        markers = successorsmarkers.get(current, ())
+        # markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
+        for m in markers:
+            nexts = m[1] # successors
+            if not nexts: # this is a prune marker
+                nexts = m[5] # parents
+            for n in nexts:
+                if n not in seen:
+                    seen.add(n)
+                    stack.append(n)
+    return False
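The behavioral change that fixes issue4354 lives in the added `pushingmarkerfor`: when a marker has no successors (a prune), the walk continues through the marker's recorded parents instead of stopping. A standalone sketch of that walk on toy data (hypothetical node names and a plain dict standing in for `obsstore.successors`; not Mercurial API):

```python
# Toy model of the pushingmarkerfor() walk above.  Markers use the field order
# ('prec', 'succs', 'flag', 'meta', 'date', 'parents'); only succs (m[1]) and
# parents (m[5]) matter here.  Node names and the dict are illustrative only.
successors = {
    # A0 was pruned: one marker with no successors, parents recorded.
    'A0': [('A0', (), 0, (), (0.0, 0), ('root',))],
}

def pushing_marker_for(successors, pushset, node):
    """Return True if a marker chain starting at node reaches the pushed set."""
    stack, seen = [node], {node}
    while stack:
        current = stack.pop()
        if current in pushset:
            return True
        for m in successors.get(current, ()):
            nexts = m[1] or m[5]  # successors, or parents for a prune marker
            for n in nexts:
                if n not in seen:
                    seen.add(n)
                    stack.append(n)
    return False

# Case B-1 from the new test: after the push, {'root', 'B0'} is common with the
# server, and A0 (the server's current head) is pruned locally.
futurecommon = {'root', 'B0'}
assert pushing_marker_for(successors, futurecommon, 'A0')
# Without the "or m[5]" prune fallback the walk would stop at A0 and the branch
# would not be considered replaced, so the push would be rejected as creating a
# new head -- the issue4354 symptom.
```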
@@ -1,265 +1,283 @@
Check that obsolete properly strip heads
  $ cat >> $HGRCPATH << EOF
  > [phases]
  > # public changeset are not obsolete
  > publish=false
  > [ui]
  > logtemplate='{node|short} ({phase}) {desc|firstline}\n'
  > [experimental]
  > evolution=createmarkers
  > EOF
  $ mkcommit() {
  >   echo "$1" > "$1"
  >   hg add "$1"
  >   hg ci -m "add $1"
  > }
  $ getid() {
  >   hg id --debug -ir "desc('$1')"
  > }


  $ hg init remote
  $ cd remote
  $ mkcommit base
  $ hg phase --public .
  $ cd ..
  $ cp -R remote base
  $ hg clone remote local
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd local

New head replaces old head
==========================

setup
(we add the 1 flags to prevent bumped error during the test)

  $ mkcommit old
  $ hg push
  pushing to $TESTTMP/remote (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  $ hg up -q '.^'
  $ mkcommit new
  created new head
  $ hg debugobsolete --flags 1 `getid old` `getid new`
  $ hg log -G --hidden
  @ 71e3228bffe1 (draft) add new
  |
  | x c70b08862e08 (draft) add old
  |/
  o b4952fcf48cf (public) add base

  $ cp -R ../remote ../backup1

old exists remotely as draft. It is obsoleted by new that we now push.
Push should not warn about creating new head

  $ hg push
  pushing to $TESTTMP/remote (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files (+1 heads)

old head is now public (public local version)
=============================================

setup

  $ rm -fr ../remote
  $ cp -R ../backup1 ../remote
  $ hg -R ../remote phase --public c70b08862e08
  $ hg pull -v
  pulling from $TESTTMP/remote (glob)
  searching for changes
  no changes found
  $ hg log -G --hidden
  @ 71e3228bffe1 (draft) add new
  |
  | o c70b08862e08 (public) add old
  |/
  o b4952fcf48cf (public) add base


Abort: old will still be an head because it's public.

  $ hg push
  pushing to $TESTTMP/remote (glob)
  searching for changes
  abort: push creates new remote head 71e3228bffe1!
  (merge or see 'hg help push' for details about pushing new heads)
  [255]

old head is now public (public remote version)
==============================================

TODO: Not implemented yet.

# setup
#
# $ rm -fr ../remote
# $ cp -R ../backup1 ../remote
# $ hg -R ../remote phase --public c70b08862e08
# $ hg phase --draft --force c70b08862e08
# $ hg log -G --hidden
# @ 71e3228bffe1 (draft) add new
# |
# | x c70b08862e08 (draft) add old
# |/
# o b4952fcf48cf (public) add base
#
#
#
# Abort: old will still be an head because it's public.
#
# $ hg push
# pushing to $TESTTMP/remote
# searching for changes
# abort: push creates new remote head 71e3228bffe1!
# (merge or see 'hg help push' for details about pushing new heads)
# [255]

old head is obsolete but replacement is not pushed
==================================================

setup

  $ rm -fr ../remote
  $ cp -R ../backup1 ../remote
  $ hg phase --draft --force '(0::) - 0'
  $ hg up -q '.^'
  $ mkcommit other
  created new head
  $ hg log -G --hidden
  @ d7d41ccbd4de (draft) add other
  |
  | o 71e3228bffe1 (draft) add new
  |/
  | x c70b08862e08 (draft) add old
  |/
  o b4952fcf48cf (public) add base


old exists remotely as draft. It is obsoleted by new but we don't push new.
Push should abort on new head

  $ hg push -r 'desc("other")'
  pushing to $TESTTMP/remote (glob)
  searching for changes
  abort: push creates new remote head d7d41ccbd4de!
  (merge or see 'hg help push' for details about pushing new heads)
  [255]



Both precursors and successors are already know remotely. Descendant adds heads
===============================================================================

setup. (The obsolete marker is known locally only

  $ cd ..
  $ rm -rf local
  $ hg clone remote local
  updating to branch default
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd local
  $ mkcommit old
  old already tracked!
  nothing changed
  [1]
  $ hg up -q '.^'
  $ mkcommit new
  created new head
  $ hg push -f
  pushing to $TESTTMP/remote (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files (+1 heads)
  $ mkcommit desc1
  $ hg up -q '.^'
  $ mkcommit desc2
  created new head
  $ hg debugobsolete `getid old` `getid new`
  $ hg log -G --hidden
  @ 5fe37041cc2b (draft) add desc2
  |
  | o a3ef1d111c5f (draft) add desc1
  |/
  o 71e3228bffe1 (draft) add new
  |
  | x c70b08862e08 (draft) add old
  |/
  o b4952fcf48cf (public) add base

  $ hg log -G --hidden -R ../remote
  o 71e3228bffe1 (draft) add new
  |
  | o c70b08862e08 (draft) add old
  |/
  @ b4952fcf48cf (public) add base

  $ cp -R ../remote ../backup2

Push should not warn about adding new heads. We create one, but we'll delete
one anyway.

  $ hg push
  pushing to $TESTTMP/remote (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 2 changes to 2 files (+1 heads)


Remote head is unknown but obsoleted by a local changeset
=========================================================

setup

  $ rm -fr ../remote
  $ cp -R ../backup1 ../remote
  $ cd ..
  $ rm -rf local
  $ hg clone remote local -r 0
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd local
  $ mkcommit new
  $ hg -R ../remote id --debug -r tip
  c70b08862e0838ea6d7c59c85da2f1ed6c8d67da tip
  $ hg id --debug -r tip
  71e3228bffe1886550777233d6c97bb5a6b2a650 tip
  $ hg debugobsolete c70b08862e0838ea6d7c59c85da2f1ed6c8d67da 71e3228bffe1886550777233d6c97bb5a6b2a650
  $ hg log -G --hidden
  @ 71e3228bffe1 (draft) add new
  |
  o b4952fcf48cf (public) add base

  $ hg log -G --hidden -R ../remote
  o c70b08862e08 (draft) add old
  |
  @ b4952fcf48cf (public) add base


-Push should not complain about new heads.
+We do not have enough data to take the right decision, we should fail
+
+  $ hg push
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  remote has heads on branch 'default' that are not known locally: c70b08862e08
+  abort: push creates new remote head 71e3228bffe1!
+  (pull and merge or see 'hg help push' for details about pushing new heads)
+  [255]

-  $ hg push --traceback
+Pulling the missing data makes it work
+
+  $ hg pull
+  pulling from $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+  (run 'hg heads' to see heads)
+  $ hg push
  pushing to $TESTTMP/remote (glob)
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files (+1 heads)
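The last scenario is the behavior change this test update makes deliberate: a remote head that is obsoleted locally but whose changeset is unknown locally is no longer silently discarded; the push is refused until the data is pulled. A minimal sketch of the known/unknown candidate split behind that decision (toy sets and the short hashes from the test, mirroring the split in `_postprocessobsolete`; not actual Mercurial API):

```python
# Toy walk-through of the "remote head is unknown locally" scenario above.
def split_candidates(candidate_newhs, local_nodes):
    """Split candidate heads into locally-known and unknown ones, as the new
    code does before any branch-replacement reasoning."""
    localcandidate = {h for h in candidate_newhs if h in local_nodes}
    unknownheads = candidate_newhs - localcandidate
    return localcandidate, unknownheads

old_remote_head = 'c70b08862e08'   # obsoleted locally, changeset absent before the pull
new_local_head = '71e3228bffe1'

# Before 'hg pull': the remote head is not in the local repo, so it stays in
# unknownheads and is still counted as a post-push head -> 2 heads > 1, abort.
before = split_candidates({old_remote_head, new_local_head}, {new_local_head})
assert before == ({new_local_head}, {old_remote_head})

# After 'hg pull': both candidates are local, so the branch-replacement walk can
# follow the marker chain c70b08862e08 -> 71e3228bffe1 and discard the old head.
after = split_candidates({old_remote_head, new_local_head},
                         {new_local_head, old_remote_head})
assert after == ({new_local_head, old_remote_head}, set())
```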