discovery: factor out calculation of heads to not warn about...
Ryan McElroy
r26862:894f54d8 default
@@ -1,397 +1,403 @@
 # discovery.py - protocol changeset discovery functions
 #
 # Copyright 2010 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 from .i18n import _
 from .node import (
     nullid,
     short,
 )
 
 from . import (
     bookmarks,
     branchmap,
     error,
     obsolete,
     phases,
     setdiscovery,
     treediscovery,
     util,
 )
 
 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, anyincoming, heads) used to identify the common
     subset of nodes between repo and remote.
 
     "common" is a list of (at least) the heads of the common subset.
     "anyincoming" is testable as a boolean indicating if any nodes are missing
     locally. If remote does not support getbundle, this actually is a list of
     roots of the nodes that would be incoming, to be supplied to
     changegroupsubset. No code except for pull should be relying on this fact
     any longer.
     "heads" is either the supplied heads, or else the remote's heads.
 
     If you pass heads and they are all known locally, the response lists just
     these heads in "common" and in "heads".
 
     Please use findcommonoutgoing to compute the set of outgoing nodes to give
     extensions a good hook into outgoing.
     """
 
     if not remote.capable('getbundle'):
         return treediscovery.findcommonincoming(repo, remote, heads, force)
 
     if heads:
         allknown = True
         knownnode = repo.changelog.hasnode # no nodemap until it is filtered
         for h in heads:
             if not knownnode(h):
                 allknown = False
                 break
         if allknown:
             return (heads, False, heads)
 
     res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                        abortwhenunrelated=not force)
     common, anyinc, srvheads = res
     return (list(common), anyinc, heads or list(srvheads))
 
 class outgoing(object):
     '''Represents the set of nodes present in a local repo but not in a
     (possibly) remote one.
 
     Members:
 
     missing is a list of all nodes present in local but not in remote.
     common is a list of all nodes shared between the two repos.
     excluded is the list of missing changeset that shouldn't be sent remotely.
     missingheads is the list of heads of missing.
     commonheads is the list of heads of common.
 
     The sets are computed on demand from the heads, unless provided upfront
     by discovery.'''
 
     def __init__(self, revlog, commonheads, missingheads):
         self.commonheads = commonheads
         self.missingheads = missingheads
         self._revlog = revlog
         self._common = None
         self._missing = None
         self.excluded = []
 
     def _computecommonmissing(self):
         sets = self._revlog.findcommonmissing(self.commonheads,
                                               self.missingheads)
         self._common, self._missing = sets
 
     @util.propertycache
     def common(self):
         if self._common is None:
             self._computecommonmissing()
         return self._common
 
     @util.propertycache
     def missing(self):
         if self._missing is None:
             self._computecommonmissing()
         return self._missing
 
 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                        commoninc=None, portable=False):
     '''Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
     If onlyheads is given, only nodes ancestral to nodes in onlyheads
     (inclusive) are included. If you already know the local repo's heads,
     passing them in onlyheads is faster than letting them be recomputed here.
 
     If commoninc is given, it must be the result of a prior call to
     findcommonincoming(repo, other, force) to avoid recomputing it here.
 
     If portable is given, compute more conservative common and missingheads,
     to make bundles created from the instance more portable.'''
     # declare an empty outgoing object to be filled later
     og = outgoing(repo.changelog, None, None)
 
     # get common set if not provided
     if commoninc is None:
         commoninc = findcommonincoming(repo, other, force=force)
     og.commonheads, _any, _hds = commoninc
 
     # compute outgoing
     mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
     if not mayexclude:
         og.missingheads = onlyheads or repo.heads()
     elif onlyheads is None:
         # use visible heads as it should be cached
         og.missingheads = repo.filtered("served").heads()
         og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
     else:
         # compute common, missing and exclude secret stuff
         sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
         og._common, allmissing = sets
         og._missing = missing = []
         og.excluded = excluded = []
         for node in allmissing:
             ctx = repo[node]
             if ctx.phase() >= phases.secret or ctx.extinct():
                 excluded.append(node)
             else:
                 missing.append(node)
         if len(missing) == len(allmissing):
             missingheads = onlyheads
         else: # update missing heads
             missingheads = phases.newheads(repo, onlyheads, excluded)
         og.missingheads = missingheads
     if portable:
         # recompute common and missingheads as if -r<rev> had been given for
         # each head of missing, and --base <rev> for each head of the proper
         # ancestors of missing
         og._computecommonmissing()
         cl = repo.changelog
         missingrevs = set(cl.rev(n) for n in og._missing)
         og._common = set(cl.ancestors(missingrevs)) - missingrevs
         commonheads = set(og.commonheads)
         og.missingheads = [h for h in og.missingheads if h not in commonheads]
 
     return og
 
 def _headssummary(repo, remote, outgoing):
     """compute a summary of branch and heads status before and after push
 
     return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
 
     - branch: the branch name
     - remoteheads: the list of remote heads known locally
                    None if the branch is new
     - newheads: the new remote heads (known locally) with outgoing pushed
     - unsyncedheads: the list of remote heads unknown locally.
     """
     cl = repo.changelog
     headssum = {}
     # A. Create set of branches involved in the push.
     branches = set(repo[n].branch() for n in outgoing.missing)
     remotemap = remote.branchmap()
     newbranches = branches - set(remotemap)
     branches.difference_update(newbranches)
 
     # A. register remote heads
     remotebranches = set()
     for branch, heads in remote.branchmap().iteritems():
         remotebranches.add(branch)
         known = []
         unsynced = []
         knownnode = cl.hasnode # do not use nodemap until it is filtered
         for h in heads:
             if knownnode(h):
                 known.append(h)
             else:
                 unsynced.append(h)
         headssum[branch] = (known, list(known), unsynced)
     # B. add new branch data
     missingctx = list(repo[n] for n in outgoing.missing)
     touchedbranches = set()
     for ctx in missingctx:
         branch = ctx.branch()
         touchedbranches.add(branch)
         if branch not in headssum:
             headssum[branch] = (None, [], [])
 
     # C drop data about untouched branches:
     for branch in remotebranches - touchedbranches:
         del headssum[branch]
 
     # D. Update newmap with outgoing changes.
     # This will possibly add new heads and remove existing ones.
     newmap = branchmap.branchcache((branch, heads[1])
                                    for branch, heads in headssum.iteritems()
                                    if heads[0] is not None)
     newmap.update(repo, (ctx.rev() for ctx in missingctx))
     for branch, newheads in newmap.iteritems():
         headssum[branch][1][:] = newheads
     return headssum
 
 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
     """Compute branchmapsummary for repo without branchmap support"""
 
     # 1-4b. old servers: Check for new topological heads.
     # Construct {old,new}map with branch = None (topological branch).
     # (code based on update)
     knownnode = repo.changelog.hasnode # no nodemap until it is filtered
     oldheads = set(h for h in remoteheads if knownnode(h))
     # all nodes in outgoing.missing are children of either:
     # - an element of oldheads
     # - another element of outgoing.missing
     # - nullrev
     # This explains why the new head are very simple to compute.
     r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
     newheads = list(c.node() for c in r)
     # set some unsynced head to issue the "unsynced changes" warning
     if inc:
         unsynced = set([None])
     else:
         unsynced = set()
     return {None: (oldheads, newheads, unsynced)}
 
+def _nowarnheads(repo, remote, newbookmarks):
+    # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
+    localbookmarks = repo._bookmarks
+    remotebookmarks = remote.listkeys('bookmarks')
+    bookmarkedheads = set()
+    for bm in localbookmarks:
+        rnode = remotebookmarks.get(bm)
+        if rnode and rnode in repo:
+            lctx, rctx = repo[bm], repo[rnode]
+            if bookmarks.validdest(repo, rctx, lctx):
+                bookmarkedheads.add(lctx.node())
+        else:
+            if bm in newbookmarks and bm not in remotebookmarks:
+                bookmarkedheads.add(repo[bm].node())
+
+    return bookmarkedheads
+
 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
                newbookmarks=[]):
     """Check that a push won't add any outgoing head
 
     raise Abort error and display ui message as needed.
     """
     # Check for each named branch if we're creating new remote heads.
     # To be a remote head after push, node must be either:
     # - unknown locally
     # - a local outgoing head descended from update
     # - a remote head that's known locally and not
     #   ancestral to an outgoing head
     if remoteheads == [nullid]:
         # remote is empty, nothing to check.
         return
 
     if remote.capable('branchmap'):
         headssum = _headssummary(repo, remote, outgoing)
     else:
         headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
     newbranches = [branch for branch, heads in headssum.iteritems()
                    if heads[0] is None]
     # 1. Check for new branches on the remote.
     if newbranches and not newbranch: # new branch requires --new-branch
         branchnames = ', '.join(sorted(newbranches))
         raise error.Abort(_("push creates new remote branches: %s!")
                           % branchnames,
                           hint=_("use 'hg push --new-branch' to create"
                                  " new remote branches"))
 
-    # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
-    localbookmarks = repo._bookmarks
-    remotebookmarks = remote.listkeys('bookmarks')
-    bookmarkedheads = set()
-    for bm in localbookmarks:
-        rnode = remotebookmarks.get(bm)
-        if rnode and rnode in repo:
-            lctx, rctx = repo[bm], repo[rnode]
-            if bookmarks.validdest(repo, rctx, lctx):
-                bookmarkedheads.add(lctx.node())
-        else:
-            if bm in newbookmarks and bm not in remotebookmarks:
-                bookmarkedheads.add(repo[bm].node())
-
+    # 2. Find heads that we need not warn about
+    nowarnheads = _nowarnheads(repo, remote, newbookmarks)
+
     # 3. Check for new heads.
     # If there are more heads after the push than before, a suitable
     # error message, depending on unsynced status, is displayed.
     errormsg = None
     # If there is no obsstore, allfuturecommon won't be used, so no
     # need to compute it.
     if repo.obsstore:
         allmissing = set(outgoing.missing)
         cctx = repo.set('%ld', outgoing.common)
         allfuturecommon = set(c.node() for c in cctx)
         allfuturecommon.update(allmissing)
     for branch, heads in sorted(headssum.iteritems()):
         remoteheads, newheads, unsyncedheads = heads
         candidate_newhs = set(newheads)
         # add unsynced data
         if remoteheads is None:
             oldhs = set()
         else:
             oldhs = set(remoteheads)
         oldhs.update(unsyncedheads)
         candidate_newhs.update(unsyncedheads)
         dhs = None # delta heads, the new heads on branch
         discardedheads = set()
         if not repo.obsstore:
             newhs = candidate_newhs
         else:
             # remove future heads which are actually obsoleted by another
             # pushed element:
             #
             # XXX as above, There are several cases this code does not handle
             # XXX properly
             #
             # (1) if <nh> is public, it won't be affected by obsolete marker
             #     and a new is created
             #
             # (2) if the new heads have ancestors which are not obsolete and
             #     not ancestors of any other heads we will have a new head too.
             #
             # These two cases will be easy to handle for known changeset but
             # much more tricky for unsynced changes.
             #
             # In addition, this code is confused by prune as it only looks for
             # successors of the heads (none if pruned) leading to issue4354
             newhs = set()
             for nh in candidate_newhs:
                 if nh in repo and repo[nh].phase() <= phases.public:
                     newhs.add(nh)
                 else:
                     for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
                         if suc != nh and suc in allfuturecommon:
                             discardedheads.add(nh)
                             break
                     else:
                         newhs.add(nh)
         unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
         if unsynced:
             if None in unsynced:
                 # old remote, no heads data
                 heads = None
             elif len(unsynced) <= 4 or repo.ui.verbose:
                 heads = ' '.join(short(h) for h in unsynced)
             else:
                 heads = (' '.join(short(h) for h in unsynced[:4]) +
                          ' ' + _("and %s others") % (len(unsynced) - 4))
             if heads is None:
                 repo.ui.status(_("remote has heads that are "
                                  "not known locally\n"))
             elif branch is None:
                 repo.ui.status(_("remote has heads that are "
                                  "not known locally: %s\n") % heads)
             else:
                 repo.ui.status(_("remote has heads on branch '%s' that are "
                                  "not known locally: %s\n") % (branch, heads))
         if remoteheads is None:
             if len(newhs) > 1:
                 dhs = list(newhs)
                 if errormsg is None:
                     errormsg = (_("push creates new branch '%s' "
                                   "with multiple heads") % (branch))
                     hint = _("merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
         elif len(newhs) > len(oldhs):
             # remove bookmarked or existing remote heads from the new heads list
-            dhs = sorted(newhs - bookmarkedheads - oldhs)
+            dhs = sorted(newhs - nowarnheads - oldhs)
         if dhs:
             if errormsg is None:
                 if branch not in ('default', None):
                     errormsg = _("push creates new remote head %s "
                                  "on branch '%s'!") % (short(dhs[0]), branch)
                 elif repo[dhs[0]].bookmarks():
                     errormsg = _("push creates new remote head %s "
                                  "with bookmark '%s'!") % (
                                  short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                 else:
                     errormsg = _("push creates new remote head %s!"
                                  ) % short(dhs[0])
                 if unsyncedheads:
                     hint = _("pull and merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
                 else:
                     hint = _("merge or"
                              " see \"hg help push\" for details about"
                              " pushing new heads")
             if branch is None:
                 repo.ui.note(_("new remote heads:\n"))
             else:
                 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
             for h in dhs:
                 repo.ui.note((" %s\n") % short(h))
     if errormsg:
         raise error.Abort(errormsg, hint=hint)
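
For illustration, a small standalone sketch (plain Python, not Mercurial's API; the node ids are made-up placeholder strings) of the set arithmetic checkheads performs with the factored-out _nowarnheads result: heads that are bookmarked or already on the remote are subtracted before the "push creates new remote head" error is raised.

# Standalone illustration only -- mirrors dhs = sorted(newhs - nowarnheads - oldhs)
# from the diff above; placeholder strings stand in for binary node ids.

def delta_heads(newhs, nowarnheads, oldhs):
    """Return the heads that would still trigger a new-remote-head warning."""
    return sorted(set(newhs) - set(nowarnheads) - set(oldhs))

oldhs = {'aaa111'}                      # heads already present on the remote
newhs = {'aaa111', 'bbb222', 'ccc333'}  # heads the remote would have after the push
nowarnheads = {'bbb222'}                # e.g. a head carrying a newly pushed bookmark

print(delta_heads(newhs, nowarnheads, oldhs))  # ['ccc333'] -> only this head warns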