##// END OF EJS Templates
discovery: pass pushop to _nowarnheads...
Ryan McElroy -
r26936:d47ac02f default
parent child Browse files
Show More
@@ -1,413 +1,416 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 nullid,
12 nullid,
13 short,
13 short,
14 )
14 )
15
15
16 from . import (
16 from . import (
17 bookmarks,
17 bookmarks,
18 branchmap,
18 branchmap,
19 error,
19 error,
20 obsolete,
20 obsolete,
21 phases,
21 phases,
22 setdiscovery,
22 setdiscovery,
23 treediscovery,
23 treediscovery,
24 util,
24 util,
25 )
25 )
26
26
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    # Old servers without the getbundle capability fall back to the slower
    # tree-walking discovery protocol.
    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        # no nodemap until it is filtered
        knownnode = repo.changelog.hasnode
        if all(knownnode(h) for h in heads):
            # Every requested head is already local: trivial answer, no
            # wire round-trips needed.
            return (heads, False, heads)

    common, anyinc, srvheads = setdiscovery.findcommonheads(
        repo.ui, repo, remote, abortwhenunrelated=not force)
    return (list(common), anyinc, heads or list(srvheads))
63
63
class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changeset that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, revlog, commonheads, missingheads):
        self._revlog = revlog
        self.commonheads = commonheads
        self.missingheads = missingheads
        self.excluded = []
        # Lazily populated by _computecommonmissing (or set directly by
        # discovery code that already has the answer).
        self._common = None
        self._missing = None

    def _computecommonmissing(self):
        # Derive the full common/missing node sets from the two head lists.
        common, missing = self._revlog.findcommonmissing(self.commonheads,
                                                         self.missingheads)
        self._common = common
        self._missing = missing

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing
103
103
def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo.changelog, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    # _any/_hds are unused; the leading underscore also avoids shadowing the
    # gettext helper '_' imported at module level.
    og.commonheads, _any, _hds = commoninc

    # compute outgoing
    # Changesets can only be withheld from a push when there are secret
    # changesets or obsolescence markers; otherwise push all local heads.
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = repo.filtered("served").heads()
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            # secret (or higher) phase and extinct changesets never leave
            # the local repository
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            # nothing was excluded, so onlyheads still describes the set
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads
    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og
163
163
def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. Create set of branches involved in the push.
    branches = set(repo[n].branch() for n in outgoing.missing)
    remotemap = remote.branchmap()
    newbranches = branches - set(remotemap)
    branches.difference_update(newbranches)

    # B. Register remote heads, split per branch into those known locally
    #    and those not yet synced.
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        knownnode = cl.hasnode # do not use nodemap until it is filtered
        for h in heads:
            if knownnode(h):
                known.append(h)
            else:
                unsynced.append(h)
        # newheads starts as a copy of known; it is rewritten in step E.
        headssum[branch] = (known, list(known), unsynced)
    # C. Add new branch data (remoteheads is None marks a branch as new).
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            headssum[branch] = (None, [], [])

    # D. Drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # E. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = branchmap.branchcache((branch, heads[1])
                                   for branch, heads in headssum.iteritems()
                                   if heads[0] is not None)
    newmap.update(repo, (ctx.rev() for ctx in missingctx))
    for branch, newheads in newmap.iteritems():
        # in-place slice assignment keeps the tuple in headssum valid
        headssum[branch][1][:] = newheads
    return headssum
218
218
219 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
219 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
220 """Compute branchmapsummary for repo without branchmap support"""
220 """Compute branchmapsummary for repo without branchmap support"""
221
221
222 # 1-4b. old servers: Check for new topological heads.
222 # 1-4b. old servers: Check for new topological heads.
223 # Construct {old,new}map with branch = None (topological branch).
223 # Construct {old,new}map with branch = None (topological branch).
224 # (code based on update)
224 # (code based on update)
225 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
225 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
226 oldheads = set(h for h in remoteheads if knownnode(h))
226 oldheads = set(h for h in remoteheads if knownnode(h))
227 # all nodes in outgoing.missing are children of either:
227 # all nodes in outgoing.missing are children of either:
228 # - an element of oldheads
228 # - an element of oldheads
229 # - another element of outgoing.missing
229 # - another element of outgoing.missing
230 # - nullrev
230 # - nullrev
231 # This explains why the new head are very simple to compute.
231 # This explains why the new head are very simple to compute.
232 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
232 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
233 newheads = list(c.node() for c in r)
233 newheads = list(c.node() for c in r)
234 # set some unsynced head to issue the "unsynced changes" warning
234 # set some unsynced head to issue the "unsynced changes" warning
235 if inc:
235 if inc:
236 unsynced = set([None])
236 unsynced = set([None])
237 else:
237 else:
238 unsynced = set()
238 unsynced = set()
239 return {None: (oldheads, newheads, unsynced)}
239 return {None: (oldheads, newheads, unsynced)}
240
240
241 def _nowarnheads(repo, remote, newbookmarks):
241 def _nowarnheads(pushop):
242 # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
242 # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
243
244 # internal config: bookmarks.pushing
245 newbookmarks = pushop.ui.configlist('bookmarks', 'pushing')
246
247 repo = pushop.repo.unfiltered()
248 remote = pushop.remote
243 localbookmarks = repo._bookmarks
249 localbookmarks = repo._bookmarks
244 remotebookmarks = remote.listkeys('bookmarks')
250 remotebookmarks = remote.listkeys('bookmarks')
245 bookmarkedheads = set()
251 bookmarkedheads = set()
246 for bm in localbookmarks:
252 for bm in localbookmarks:
247 rnode = remotebookmarks.get(bm)
253 rnode = remotebookmarks.get(bm)
248 if rnode and rnode in repo:
254 if rnode and rnode in repo:
249 lctx, rctx = repo[bm], repo[rnode]
255 lctx, rctx = repo[bm], repo[rnode]
250 if bookmarks.validdest(repo, rctx, lctx):
256 if bookmarks.validdest(repo, rctx, lctx):
251 bookmarkedheads.add(lctx.node())
257 bookmarkedheads.add(lctx.node())
252 else:
258 else:
253 if bm in newbookmarks and bm not in remotebookmarks:
259 if bm in newbookmarks and bm not in remotebookmarks:
254 bookmarkedheads.add(repo[bm].node())
260 bookmarkedheads.add(repo[bm].node())
255
261
256 return bookmarkedheads
262 return bookmarkedheads
257
263
def checkheads(pushop):
    """Check that a push won't add any outgoing head

    raise Abort error and display ui message as needed.
    """

    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    outgoing = pushop.outgoing
    remoteheads = pushop.remoteheads
    newbranch = pushop.newbranch
    inc = bool(pushop.incoming)

    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise error.Abort(_("push creates new remote branches: %s!")
                          % branchnames,
                          hint=_("use 'hg push --new-branch' to create"
                                 " new remote branches"))

    # 2. Find heads that we need not warn about
    nowarnheads = _nowarnheads(pushop)

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    errormsg = None
    # If there is no obsstore, allfuturecommon won't be used, so no
    # need to compute it.
    if repo.obsstore:
        allmissing = set(outgoing.missing)
        cctx = repo.set('%ld', outgoing.common)
        allfuturecommon = set(c.node() for c in cctx)
        allfuturecommon.update(allmissing)
    # NOTE(review): the loop below rebinds 'remoteheads' (shadowing the
    # pushop value read above) and later reuses 'heads' as a display string;
    # both names are dead after this point so behavior is unaffected.
    for branch, heads in sorted(headssum.iteritems()):
        remoteheads, newheads, unsyncedheads = heads
        candidate_newhs = set(newheads)
        # add unsynced data
        if remoteheads is None:
            oldhs = set()
        else:
            oldhs = set(remoteheads)
        oldhs.update(unsyncedheads)
        candidate_newhs.update(unsyncedheads)
        dhs = None # delta heads, the new heads on branch
        discardedheads = set()
        if not repo.obsstore:
            newhs = candidate_newhs
        else:
            # remove future heads which are actually obsoleted by another
            # pushed element:
            #
            # XXX as above, There are several cases this code does not handle
            # XXX properly
            #
            # (1) if <nh> is public, it won't be affected by obsolete marker
            #     and a new is created
            #
            # (2) if the new heads have ancestors which are not obsolete and
            #     not ancestors of any other heads we will have a new head too.
            #
            # These two cases will be easy to handle for known changeset but
            # much more tricky for unsynced changes.
            #
            # In addition, this code is confused by prune as it only looks for
            # successors of the heads (none if pruned) leading to issue4354
            newhs = set()
            for nh in candidate_newhs:
                if nh in repo and repo[nh].phase() <= phases.public:
                    # public changesets cannot be made obsolete
                    newhs.add(nh)
                else:
                    for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
                        if suc != nh and suc in allfuturecommon:
                            # obsoleted by something else in this push
                            discardedheads.add(nh)
                            break
                    else:
                        newhs.add(nh)
        unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
        if unsynced:
            if None in unsynced:
                # old remote, no heads data
                heads = None
            elif len(unsynced) <= 4 or repo.ui.verbose:
                heads = ' '.join(short(h) for h in unsynced)
            else:
                # keep the message short: only show the first four hashes
                heads = (' '.join(short(h) for h in unsynced[:4]) +
                         ' ' + _("and %s others") % (len(unsynced) - 4))
            if heads is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally\n"))
            elif branch is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally: %s\n") % heads)
            else:
                repo.ui.status(_("remote has heads on branch '%s' that are "
                                 "not known locally: %s\n") % (branch, heads))
        if remoteheads is None:
            # new branch: only complain if it would end up with several heads
            if len(newhs) > 1:
                dhs = list(newhs)
                if errormsg is None:
                    errormsg = (_("push creates new branch '%s' "
                                  "with multiple heads") % (branch))
                    hint = _("merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
        elif len(newhs) > len(oldhs):
            # remove bookmarked or existing remote heads from the new heads list
            dhs = sorted(newhs - nowarnheads - oldhs)
        if dhs:
            # only the first offending branch produces the abort message;
            # the remaining ones are still listed via ui.note below
            if errormsg is None:
                if branch not in ('default', None):
                    errormsg = _("push creates new remote head %s "
                                 "on branch '%s'!") % (short(dhs[0]), branch)
                elif repo[dhs[0]].bookmarks():
                    errormsg = _("push creates new remote head %s "
                                 "with bookmark '%s'!") % (
                                 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                else:
                    errormsg = _("push creates new remote head %s!"
                                 ) % short(dhs[0])
                if unsyncedheads:
                    hint = _("pull and merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
                else:
                    hint = _("merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
            if branch is None:
                repo.ui.note(_("new remote heads:\n"))
            else:
                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
            for h in dhs:
                repo.ui.note((" %s\n") % short(h))
    if errormsg:
        raise error.Abort(errormsg, hint=hint)
General Comments 0
You need to be logged in to leave comments. Login now