discovery: rename 'error' to 'errormsg'...
Pierre-Yves David
r26585:b38d9373 default
@@ -1,393 +1,393 @@
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from .node import (
12 12 nullid,
13 13 short,
14 14 )
15 15
16 16 from . import (
17 17 bookmarks,
18 18 branchmap,
19 19 obsolete,
20 20 phases,
21 21 setdiscovery,
22 22 treediscovery,
23 23 util,
24 24 )
25 25
26 26 def findcommonincoming(repo, remote, heads=None, force=False):
27 27 """Return a tuple (common, anyincoming, heads) used to identify the common
28 28 subset of nodes between repo and remote.
29 29
30 30 "common" is a list of (at least) the heads of the common subset.
31 31 "anyincoming" is testable as a boolean indicating if any nodes are missing
32 32 locally. If remote does not support getbundle, this actually is a list of
33 33 roots of the nodes that would be incoming, to be supplied to
34 34 changegroupsubset. No code except for pull should be relying on this fact
35 35 any longer.
36 36 "heads" is either the supplied heads, or else the remote's heads.
37 37
38 38 If you pass heads and they are all known locally, the response lists just
39 39 these heads in "common" and in "heads".
40 40
41 41 Please use findcommonoutgoing to compute the set of outgoing nodes to give
42 42 extensions a good hook into outgoing.
43 43 """
44 44
45 45 if not remote.capable('getbundle'):
46 46 return treediscovery.findcommonincoming(repo, remote, heads, force)
47 47
48 48 if heads:
49 49 allknown = True
50 50 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
51 51 for h in heads:
52 52 if not knownnode(h):
53 53 allknown = False
54 54 break
55 55 if allknown:
56 56 return (heads, False, heads)
57 57
58 58 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
59 59 abortwhenunrelated=not force)
60 60 common, anyinc, srvheads = res
61 61 return (list(common), anyinc, heads or list(srvheads))
62 62
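(Editorial sketch, not part of the changeset.) A minimal illustration of how a pull-style caller might consume the (common, anyincoming, heads) tuple returned above; 'repo' and 'remote' are assumed to be a local repository object and a peer that supports getbundle:

# Hypothetical usage sketch; names other than findcommonincoming are assumed.
def sketch_pull_discovery(repo, remote):
    common, anyincoming, rheads = findcommonincoming(repo, remote)
    if not anyincoming:
        repo.ui.status("no changes found\n")
        return None
    # 'common' seeds the changegroup request; 'rheads' bounds what is fetched.
    return common, rheads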
63 63 class outgoing(object):
64 64 '''Represents the set of nodes present in a local repo but not in a
65 65 (possibly) remote one.
66 66
67 67 Members:
68 68
69 69 missing is a list of all nodes present in local but not in remote.
70 70 common is a list of all nodes shared between the two repos.
71 71 excluded is the list of missing changesets that shouldn't be sent remotely.
72 72 missingheads is the list of heads of missing.
73 73 commonheads is the list of heads of common.
74 74
75 75 The sets are computed on demand from the heads, unless provided upfront
76 76 by discovery.'''
77 77
78 78 def __init__(self, revlog, commonheads, missingheads):
79 79 self.commonheads = commonheads
80 80 self.missingheads = missingheads
81 81 self._revlog = revlog
82 82 self._common = None
83 83 self._missing = None
84 84 self.excluded = []
85 85
86 86 def _computecommonmissing(self):
87 87 sets = self._revlog.findcommonmissing(self.commonheads,
88 88 self.missingheads)
89 89 self._common, self._missing = sets
90 90
91 91 @util.propertycache
92 92 def common(self):
93 93 if self._common is None:
94 94 self._computecommonmissing()
95 95 return self._common
96 96
97 97 @util.propertycache
98 98 def missing(self):
99 99 if self._missing is None:
100 100 self._computecommonmissing()
101 101 return self._missing
102 102
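(Editorial sketch, not part of the changeset.) The common and missing members above are lazy: nothing is walked until they are first accessed, at which point _computecommonmissing() fills both via revlog.findcommonmissing(). A minimal illustration, assuming 'repo' is a local repository and the head arguments are lists of nodes:

# Hypothetical usage sketch; sketch_outgoing_sizes is an invented name.
def sketch_outgoing_sizes(repo, commonheads, missingheads):
    og = outgoing(repo.changelog, commonheads, missingheads)
    # The first attribute access triggers the revlog walk for both sets.
    return len(og.common), len(og.missing)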
103 103 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
104 104 commoninc=None, portable=False):
105 105 '''Return an outgoing instance to identify the nodes present in repo but
106 106 not in other.
107 107
108 108 If onlyheads is given, only nodes ancestral to nodes in onlyheads
109 109 (inclusive) are included. If you already know the local repo's heads,
110 110 passing them in onlyheads is faster than letting them be recomputed here.
111 111
112 112 If commoninc is given, it must be the result of a prior call to
113 113 findcommonincoming(repo, other, force) to avoid recomputing it here.
114 114
115 115 If portable is given, compute more conservative common and missingheads,
116 116 to make bundles created from the instance more portable.'''
117 117 # declare an empty outgoing object to be filled later
118 118 og = outgoing(repo.changelog, None, None)
119 119
120 120 # get common set if not provided
121 121 if commoninc is None:
122 122 commoninc = findcommonincoming(repo, other, force=force)
123 123 og.commonheads, _any, _hds = commoninc
124 124
125 125 # compute outgoing
126 126 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
127 127 if not mayexclude:
128 128 og.missingheads = onlyheads or repo.heads()
129 129 elif onlyheads is None:
130 130 # use visible heads as it should be cached
131 131 og.missingheads = repo.filtered("served").heads()
132 132 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
133 133 else:
134 134 # compute common, missing and exclude secret stuff
135 135 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
136 136 og._common, allmissing = sets
137 137 og._missing = missing = []
138 138 og.excluded = excluded = []
139 139 for node in allmissing:
140 140 ctx = repo[node]
141 141 if ctx.phase() >= phases.secret or ctx.extinct():
142 142 excluded.append(node)
143 143 else:
144 144 missing.append(node)
145 145 if len(missing) == len(allmissing):
146 146 missingheads = onlyheads
147 147 else: # update missing heads
148 148 missingheads = phases.newheads(repo, onlyheads, excluded)
149 149 og.missingheads = missingheads
150 150 if portable:
151 151 # recompute common and missingheads as if -r<rev> had been given for
152 152 # each head of missing, and --base <rev> for each head of the proper
153 153 # ancestors of missing
154 154 og._computecommonmissing()
155 155 cl = repo.changelog
156 156 missingrevs = set(cl.rev(n) for n in og._missing)
157 157 og._common = set(cl.ancestors(missingrevs)) - missingrevs
158 158 commonheads = set(og.commonheads)
159 159 og.missingheads = [h for h in og.missingheads if h not in commonheads]
160 160
161 161 return og
162 162
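(Editorial sketch, not part of the changeset.) As the docstring of findcommonincoming suggests, extensions should go through findcommonoutgoing to learn what a push would send; a hypothetical caller, assuming 'repo' is a local repository and 'other' a peer:

# Hypothetical usage sketch; sketch_list_outgoing is an invented name.
def sketch_list_outgoing(repo, other):
    og = findcommonoutgoing(repo, other)
    for node in og.missing:
        repo.ui.note("would push %s\n" % short(node))
    return og.missingheads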
163 163 def _headssummary(repo, remote, outgoing):
164 164 """compute a summary of branch and heads status before and after push
165 165
166 166 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
167 167
168 168 - branch: the branch name
169 169 - remoteheads: the list of remote heads known locally
170 170 None if the branch is new
171 171 - newheads: the new remote heads (known locally) with outgoing pushed
172 172 - unsyncedheads: the list of remote heads unknown locally.
173 173 """
174 174 cl = repo.changelog
175 175 headssum = {}
176 176 # A. Create set of branches involved in the push.
177 177 branches = set(repo[n].branch() for n in outgoing.missing)
178 178 remotemap = remote.branchmap()
179 179 newbranches = branches - set(remotemap)
180 180 branches.difference_update(newbranches)
181 181
182 182 # A. register remote heads
183 183 remotebranches = set()
184 184 for branch, heads in remote.branchmap().iteritems():
185 185 remotebranches.add(branch)
186 186 known = []
187 187 unsynced = []
188 188 knownnode = cl.hasnode # do not use nodemap until it is filtered
189 189 for h in heads:
190 190 if knownnode(h):
191 191 known.append(h)
192 192 else:
193 193 unsynced.append(h)
194 194 headssum[branch] = (known, list(known), unsynced)
195 195 # B. add new branch data
196 196 missingctx = list(repo[n] for n in outgoing.missing)
197 197 touchedbranches = set()
198 198 for ctx in missingctx:
199 199 branch = ctx.branch()
200 200 touchedbranches.add(branch)
201 201 if branch not in headssum:
202 202 headssum[branch] = (None, [], [])
203 203
204 204 # C. drop data about untouched branches:
205 205 for branch in remotebranches - touchedbranches:
206 206 del headssum[branch]
207 207
208 208 # D. Update newmap with outgoing changes.
209 209 # This will possibly add new heads and remove existing ones.
210 210 newmap = branchmap.branchcache((branch, heads[1])
211 211 for branch, heads in headssum.iteritems()
212 212 if heads[0] is not None)
213 213 newmap.update(repo, (ctx.rev() for ctx in missingctx))
214 214 for branch, newheads in newmap.iteritems():
215 215 headssum[branch][1][:] = newheads
216 216 return headssum
217 217
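(Editorial sketch, not part of the changeset.) The shape of the mapping returned by _headssummary, with invented node values, for one existing branch, one branch with unsynced remote heads, and one branch that is new to the remote:

# Illustrative data only; the abbreviated node strings are made up.
# branch -> (remoteheads or None, newheads, unsyncedheads)
example_headssum = {
    'default': (['a1b2...'], ['a1b2...', 'c3d4...'], []),  # existing branch
    'stable':  (['e5f6...'], ['e5f6...'], ['0a0b...']),     # has unsynced head
    'feature': (None, ['9f8e...'], []),                     # new on the remote
}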
218 218 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
219 219 """Compute branchmapsummary for repo without branchmap support"""
220 220
221 221 # 1-4b. old servers: Check for new topological heads.
222 222 # Construct {old,new}map with branch = None (topological branch).
223 223 # (code based on update)
224 224 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
225 225 oldheads = set(h for h in remoteheads if knownnode(h))
226 226 # all nodes in outgoing.missing are children of either:
227 227 # - an element of oldheads
228 228 # - another element of outgoing.missing
229 229 # - nullrev
230 230 # This explains why the new heads are very simple to compute.
231 231 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
232 232 newheads = list(c.node() for c in r)
233 233 # set some unsynced head to issue the "unsynced changes" warning
234 234 if inc:
235 235 unsynced = set([None])
236 236 else:
237 237 unsynced = set()
238 238 return {None: (oldheads, newheads, unsynced)}
239 239
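(Editorial sketch, not part of the changeset.) Against an old server there is no named-branch information, so everything is keyed under None, and a placeholder None entry in the unsynced set stands in for remote changes we cannot enumerate; an invented example of the returned mapping:

# Illustrative data only; the abbreviated node strings are made up.
example_old_headssum = {
    None: (set(['a1b2...']),        # remote heads known locally
           ['a1b2...', 'c3d4...'],  # topological heads after the push
           set([None])),            # non-empty only when inc is True
}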
240 240 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
241 241 newbookmarks=[]):
242 242 """Check that a push won't add any outgoing head
243 243
244 244 raise Abort error and display ui message as needed.
245 245 """
246 246 # Check for each named branch if we're creating new remote heads.
247 247 # To be a remote head after push, node must be either:
248 248 # - unknown locally
249 249 # - a local outgoing head descended from update
250 250 # - a remote head that's known locally and not
251 251 # ancestral to an outgoing head
252 252 if remoteheads == [nullid]:
253 253 # remote is empty, nothing to check.
254 254 return
255 255
256 256 if remote.capable('branchmap'):
257 257 headssum = _headssummary(repo, remote, outgoing)
258 258 else:
259 259 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
260 260 newbranches = [branch for branch, heads in headssum.iteritems()
261 261 if heads[0] is None]
262 262 # 1. Check for new branches on the remote.
263 263 if newbranches and not newbranch: # new branch requires --new-branch
264 264 branchnames = ', '.join(sorted(newbranches))
265 265 raise util.Abort(_("push creates new remote branches: %s!")
266 266 % branchnames,
267 267 hint=_("use 'hg push --new-branch' to create"
268 268 " new remote branches"))
269 269
270 270 # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
271 271 localbookmarks = repo._bookmarks
272 272 remotebookmarks = remote.listkeys('bookmarks')
273 273 bookmarkedheads = set()
274 274 for bm in localbookmarks:
275 275 rnode = remotebookmarks.get(bm)
276 276 if rnode and rnode in repo:
277 277 lctx, rctx = repo[bm], repo[rnode]
278 278 if bookmarks.validdest(repo, rctx, lctx):
279 279 bookmarkedheads.add(lctx.node())
280 280 else:
281 281 if bm in newbookmarks:
282 282 bookmarkedheads.add(repo[bm].node())
283 283
284 284 # 3. Check for new heads.
285 285 # If there are more heads after the push than before, a suitable
286 286 # error message, depending on unsynced status, is displayed.
287 error = None
287 errormsg = None
288 288 # If there is no obsstore, allfuturecommon won't be used, so no
289 289 # need to compute it.
290 290 if repo.obsstore:
291 291 allmissing = set(outgoing.missing)
292 292 cctx = repo.set('%ld', outgoing.common)
293 293 allfuturecommon = set(c.node() for c in cctx)
294 294 allfuturecommon.update(allmissing)
295 295 for branch, heads in sorted(headssum.iteritems()):
296 296 remoteheads, newheads, unsyncedheads = heads
297 297 candidate_newhs = set(newheads)
298 298 # add unsynced data
299 299 if remoteheads is None:
300 300 oldhs = set()
301 301 else:
302 302 oldhs = set(remoteheads)
303 303 oldhs.update(unsyncedheads)
304 304 candidate_newhs.update(unsyncedheads)
305 305 dhs = None # delta heads, the new heads on branch
306 306 discardedheads = set()
307 307 if repo.obsstore:
308 308 # remove future heads which are actually obsoleted by another
309 309 # pushed element:
310 310 #
311 311 # XXX as above, there are several cases this code does not handle
312 312 # XXX properly
313 313 #
314 314 # (1) if <nh> is public, it won't be affected by obsolete markers
315 315 # and a new head is created
316 316 #
317 317 # (2) if the new heads have ancestors which are not obsolete and
318 318 # not ancestors of any other heads we will have a new head too.
319 319 #
320 320 # These two cases will be easy to handle for known changesets but
321 321 # much more tricky for unsynced changes.
322 322 newhs = set()
323 323 for nh in candidate_newhs:
324 324 if nh in repo and repo[nh].phase() <= phases.public:
325 325 newhs.add(nh)
326 326 else:
327 327 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
328 328 if suc != nh and suc in allfuturecommon:
329 329 discardedheads.add(nh)
330 330 break
331 331 else:
332 332 newhs.add(nh)
333 333 else:
334 334 newhs = candidate_newhs
335 335 unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
336 336 if unsynced:
337 337 if None in unsynced:
338 338 # old remote, no heads data
339 339 heads = None
340 340 elif len(unsynced) <= 4 or repo.ui.verbose:
341 341 heads = ' '.join(short(h) for h in unsynced)
342 342 else:
343 343 heads = (' '.join(short(h) for h in unsynced[:4]) +
344 344 ' ' + _("and %s others") % (len(unsynced) - 4))
345 345 if heads is None:
346 346 repo.ui.status(_("remote has heads that are "
347 347 "not known locally\n"))
348 348 elif branch is None:
349 349 repo.ui.status(_("remote has heads that are "
350 350 "not known locally: %s\n") % heads)
351 351 else:
352 352 repo.ui.status(_("remote has heads on branch '%s' that are "
353 353 "not known locally: %s\n") % (branch, heads))
354 354 if remoteheads is None:
355 355 if len(newhs) > 1:
356 356 dhs = list(newhs)
357 if error is None:
358 error = (_("push creates new branch '%s' "
359 "with multiple heads") % (branch))
357 if errormsg is None:
358 errormsg = (_("push creates new branch '%s' "
359 "with multiple heads") % (branch))
360 360 hint = _("merge or"
361 361 " see \"hg help push\" for details about"
362 362 " pushing new heads")
363 363 elif len(newhs) > len(oldhs):
364 364 # remove bookmarked or existing remote heads from the new heads list
365 365 dhs = sorted(newhs - bookmarkedheads - oldhs)
366 366 if dhs:
367 if error is None:
367 if errormsg is None:
368 368 if branch not in ('default', None):
369 error = _("push creates new remote head %s "
370 "on branch '%s'!") % (short(dhs[0]), branch)
369 errormsg = _("push creates new remote head %s "
370 "on branch '%s'!") % (short(dhs[0]), branch)
371 371 elif repo[dhs[0]].bookmarks():
372 error = _("push creates new remote head %s "
373 "with bookmark '%s'!") % (
374 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
372 errormsg = _("push creates new remote head %s "
373 "with bookmark '%s'!") % (
374 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
375 375 else:
376 error = _("push creates new remote head %s!"
377 ) % short(dhs[0])
376 errormsg = _("push creates new remote head %s!"
377 ) % short(dhs[0])
378 378 if unsyncedheads:
379 379 hint = _("pull and merge or"
380 380 " see \"hg help push\" for details about"
381 381 " pushing new heads")
382 382 else:
383 383 hint = _("merge or"
384 384 " see \"hg help push\" for details about"
385 385 " pushing new heads")
386 386 if branch is None:
387 387 repo.ui.note(_("new remote heads:\n"))
388 388 else:
389 389 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
390 390 for h in dhs:
391 391 repo.ui.note((" %s\n") % short(h))
392 if error:
393 raise util.Abort(error, hint=hint)
392 if errormsg:
393 raise util.Abort(errormsg, hint=hint)
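(Editorial sketch, not part of the changeset.) checkheads either returns quietly or raises util.Abort carrying the errormsg and hint assembled above; a hypothetical push-side caller, with all surrounding names assumed:

# Hypothetical usage sketch; sketch_check_before_push is an invented name.
def sketch_check_before_push(repo, remote, outgoing, remoteheads):
    try:
        checkheads(repo, remote, outgoing, remoteheads,
                   newbranch=False, inc=False)
    except util.Abort as err:
        repo.ui.warn("push rejected: %s\n" % err)
        raise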