discovery: pass pushop to _nowarnheads...
Ryan McElroy
r26936:d47ac02f default
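
The refactor is small but typical of the push code: instead of handing `_nowarnheads` the repository, the remote peer, and the freshly pushed bookmark list as three separate arguments, `checkheads` now passes the whole push operation object and the helper derives those values itself. A minimal sketch of the pattern, using hypothetical stand-in names rather than Mercurial's real classes, might look like this:

class PushOp(object):
    """Hypothetical stand-in for the push operation object."""
    def __init__(self, ui, repo, remote):
        self.ui = ui
        self.repo = repo
        self.remote = remote

# Before: the caller gathers and forwards every value itself.
def nowarnheads_before(repo, remote, newbookmarks):
    return (repo, remote, newbookmarks)

# After: the helper reads what it needs from the operation object, so the
# call site collapses to nowarnheads_after(pushop).
def nowarnheads_after(pushop):
    newbookmarks = pushop.ui.configlist('bookmarks', 'pushing')
    return (pushop.repo.unfiltered(), pushop.remote, newbookmarks)
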
@@ -1,413 +1,416 @@
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 from .i18n import _
11 11 from .node import (
12 12 nullid,
13 13 short,
14 14 )
15 15
16 16 from . import (
17 17 bookmarks,
18 18 branchmap,
19 19 error,
20 20 obsolete,
21 21 phases,
22 22 setdiscovery,
23 23 treediscovery,
24 24 util,
25 25 )
26 26
27 27 def findcommonincoming(repo, remote, heads=None, force=False):
28 28 """Return a tuple (common, anyincoming, heads) used to identify the common
29 29 subset of nodes between repo and remote.
30 30
31 31 "common" is a list of (at least) the heads of the common subset.
32 32 "anyincoming" is testable as a boolean indicating if any nodes are missing
33 33 locally. If remote does not support getbundle, this actually is a list of
34 34 roots of the nodes that would be incoming, to be supplied to
35 35 changegroupsubset. No code except for pull should be relying on this fact
36 36 any longer.
37 37 "heads" is either the supplied heads, or else the remote's heads.
38 38
39 39 If you pass heads and they are all known locally, the response lists just
40 40 these heads in "common" and in "heads".
41 41
42 42 Please use findcommonoutgoing to compute the set of outgoing nodes to give
43 43 extensions a good hook into outgoing.
44 44 """
45 45
46 46 if not remote.capable('getbundle'):
47 47 return treediscovery.findcommonincoming(repo, remote, heads, force)
48 48
49 49 if heads:
50 50 allknown = True
51 51 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
52 52 for h in heads:
53 53 if not knownnode(h):
54 54 allknown = False
55 55 break
56 56 if allknown:
57 57 return (heads, False, heads)
58 58
59 59 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
60 60 abortwhenunrelated=not force)
61 61 common, anyinc, srvheads = res
62 62 return (list(common), anyinc, heads or list(srvheads))
63 63
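# --- editorial usage sketch, not part of the changeset ----------------------
# Assuming `repo` is a local repository object and `remote` a peer (for
# example obtained through mercurial.hg.repository / mercurial.hg.peer in a
# release of this vintage), the return value unpacks as described in the
# docstring above. Internal APIs like this one can change between versions.

from mercurial import discovery

def sketch_incoming(repo, remote):
    common, anyinc, heads = discovery.findcommonincoming(repo, remote)
    if anyinc:
        # something is missing locally; `common` holds (at least) the heads
        # of the history shared with the remote
        repo.ui.status('remote has changesets not present locally\n')
    return common, heads
# ---------------------------------------------------------------------------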
64 64 class outgoing(object):
65 65 '''Represents the set of nodes present in a local repo but not in a
66 66 (possibly) remote one.
67 67
68 68 Members:
69 69
70 70 missing is a list of all nodes present in local but not in remote.
71 71 common is a list of all nodes shared between the two repos.
72 72 excluded is the list of missing changesets that shouldn't be sent remotely.
73 73 missingheads is the list of heads of missing.
74 74 commonheads is the list of heads of common.
75 75
76 76 The sets are computed on demand from the heads, unless provided upfront
77 77 by discovery.'''
78 78
79 79 def __init__(self, revlog, commonheads, missingheads):
80 80 self.commonheads = commonheads
81 81 self.missingheads = missingheads
82 82 self._revlog = revlog
83 83 self._common = None
84 84 self._missing = None
85 85 self.excluded = []
86 86
87 87 def _computecommonmissing(self):
88 88 sets = self._revlog.findcommonmissing(self.commonheads,
89 89 self.missingheads)
90 90 self._common, self._missing = sets
91 91
92 92 @util.propertycache
93 93 def common(self):
94 94 if self._common is None:
95 95 self._computecommonmissing()
96 96 return self._common
97 97
98 98 @util.propertycache
99 99 def missing(self):
100 100 if self._missing is None:
101 101 self._computecommonmissing()
102 102 return self._missing
103 103
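# --- editorial usage sketch, not part of the changeset ----------------------
# `common` and `missing` above are computed lazily: nothing walks the
# changelog until one of the two properties is first read, and a single
# findcommonmissing() call then fills both caches. A rough illustration,
# assuming `repo` and the two head lists are already available:

def sketch_outgoing_sets(repo, commonheads, missingheads):
    og = outgoing(repo.changelog, commonheads, missingheads)
    # no graph traversal has happened yet; the first access below computes
    # and caches both sets at once
    return len(og.missing), len(og.common)
# ---------------------------------------------------------------------------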
104 104 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
105 105 commoninc=None, portable=False):
106 106 '''Return an outgoing instance to identify the nodes present in repo but
107 107 not in other.
108 108
109 109 If onlyheads is given, only nodes ancestral to nodes in onlyheads
110 110 (inclusive) are included. If you already know the local repo's heads,
111 111 passing them in onlyheads is faster than letting them be recomputed here.
112 112
113 113 If commoninc is given, it must be the result of a prior call to
114 114 findcommonincoming(repo, other, force) to avoid recomputing it here.
115 115
116 116 If portable is given, compute more conservative common and missingheads,
117 117 to make bundles created from the instance more portable.'''
118 118 # declare an empty outgoing object to be filled later
119 119 og = outgoing(repo.changelog, None, None)
120 120
121 121 # get common set if not provided
122 122 if commoninc is None:
123 123 commoninc = findcommonincoming(repo, other, force=force)
124 124 og.commonheads, _any, _hds = commoninc
125 125
126 126 # compute outgoing
127 127 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
128 128 if not mayexclude:
129 129 og.missingheads = onlyheads or repo.heads()
130 130 elif onlyheads is None:
131 131 # use visible heads as it should be cached
132 132 og.missingheads = repo.filtered("served").heads()
133 133 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
134 134 else:
135 135 # compute common, missing and exclude secret stuff
136 136 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
137 137 og._common, allmissing = sets
138 138 og._missing = missing = []
139 139 og.excluded = excluded = []
140 140 for node in allmissing:
141 141 ctx = repo[node]
142 142 if ctx.phase() >= phases.secret or ctx.extinct():
143 143 excluded.append(node)
144 144 else:
145 145 missing.append(node)
146 146 if len(missing) == len(allmissing):
147 147 missingheads = onlyheads
148 148 else: # update missing heads
149 149 missingheads = phases.newheads(repo, onlyheads, excluded)
150 150 og.missingheads = missingheads
151 151 if portable:
152 152 # recompute common and missingheads as if -r<rev> had been given for
153 153 # each head of missing, and --base <rev> for each head of the proper
154 154 # ancestors of missing
155 155 og._computecommonmissing()
156 156 cl = repo.changelog
157 157 missingrevs = set(cl.rev(n) for n in og._missing)
158 158 og._common = set(cl.ancestors(missingrevs)) - missingrevs
159 159 commonheads = set(og.commonheads)
160 160 og.missingheads = [h for h in og.missingheads if h not in commonheads]
161 161
162 162 return og
163 163
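# --- editorial usage sketch, not part of the changeset ----------------------
# Putting the helpers together: roughly what "hg outgoing"-style code does.
# `repo` is a local repository and `other` a peer; setup is assumed, and
# these are internal APIs that may differ between releases.

from mercurial import discovery
from mercurial.node import short

def sketch_list_outgoing(repo, other):
    og = discovery.findcommonoutgoing(repo, other)
    for node in og.missing:
        repo.ui.status('would push %s\n' % short(node))
    return og.missingheads
# ---------------------------------------------------------------------------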
164 164 def _headssummary(repo, remote, outgoing):
165 165 """compute a summary of branch and heads status before and after push
166 166
167 167 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
168 168
169 169 - branch: the branch name
170 170 - remoteheads: the list of remote heads known locally
171 171 None if the branch is new
172 172 - newheads: the new remote heads (known locally) with outgoing pushed
173 173 - unsyncedheads: the list of remote heads unknown locally.
174 174 """
175 175 cl = repo.changelog
176 176 headssum = {}
177 177 # A. Create set of branches involved in the push.
178 178 branches = set(repo[n].branch() for n in outgoing.missing)
179 179 remotemap = remote.branchmap()
180 180 newbranches = branches - set(remotemap)
181 181 branches.difference_update(newbranches)
182 182
183 183 # A. register remote heads
184 184 remotebranches = set()
185 185 for branch, heads in remote.branchmap().iteritems():
186 186 remotebranches.add(branch)
187 187 known = []
188 188 unsynced = []
189 189 knownnode = cl.hasnode # do not use nodemap until it is filtered
190 190 for h in heads:
191 191 if knownnode(h):
192 192 known.append(h)
193 193 else:
194 194 unsynced.append(h)
195 195 headssum[branch] = (known, list(known), unsynced)
196 196 # B. add new branch data
197 197 missingctx = list(repo[n] for n in outgoing.missing)
198 198 touchedbranches = set()
199 199 for ctx in missingctx:
200 200 branch = ctx.branch()
201 201 touchedbranches.add(branch)
202 202 if branch not in headssum:
203 203 headssum[branch] = (None, [], [])
204 204
205 205 # C drop data about untouched branches:
206 206 for branch in remotebranches - touchedbranches:
207 207 del headssum[branch]
208 208
209 209 # D. Update newmap with outgoing changes.
210 210 # This will possibly add new heads and remove existing ones.
211 211 newmap = branchmap.branchcache((branch, heads[1])
212 212 for branch, heads in headssum.iteritems()
213 213 if heads[0] is not None)
214 214 newmap.update(repo, (ctx.rev() for ctx in missingctx))
215 215 for branch, newheads in newmap.iteritems():
216 216 headssum[branch][1][:] = newheads
217 217 return headssum
218 218
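# --- editorial sketch, not part of the changeset ----------------------------
# A small reader for the mapping built above, restating the field meanings
# from the docstring; `headssum` is assumed to be the dict returned by
# _headssummary and `ui` a ui object.

def sketch_report(ui, headssum):
    for branch, (remoteheads, newheads, unsyncedheads) in headssum.items():
        if remoteheads is None:
            ui.status("branch '%s' is new on the remote\n" % branch)
        elif len(newheads) > len(remoteheads):
            ui.status("push would add heads on branch '%s'\n" % branch)
        if unsyncedheads:
            ui.status("branch '%s' has remote heads unknown locally\n" % branch)
# ---------------------------------------------------------------------------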
219 219 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
220 220 """Compute branchmapsummary for repo without branchmap support"""
221 221
222 222 # 1-4b. old servers: Check for new topological heads.
223 223 # Construct {old,new}map with branch = None (topological branch).
224 224 # (code based on update)
225 225 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
226 226 oldheads = set(h for h in remoteheads if knownnode(h))
227 227 # all nodes in outgoing.missing are children of either:
228 228 # - an element of oldheads
229 229 # - another element of outgoing.missing
230 230 # - nullrev
231 231 # This explains why the new heads are very simple to compute.
232 232 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
233 233 newheads = list(c.node() for c in r)
234 234 # set some unsynced head to issue the "unsynced changes" warning
235 235 if inc:
236 236 unsynced = set([None])
237 237 else:
238 238 unsynced = set()
239 239 return {None: (oldheads, newheads, unsynced)}
240 240
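# --- editorial sketch, not part of the changeset ----------------------------
# For pre-branchmap servers everything is treated as a single topological
# branch: the post-push heads are just the heads of the union of the old
# remote heads and the outgoing changesets, exactly the revset used above.

def sketch_topological_newheads(repo, oldheads, missing):
    return [c.node() for c in repo.set('heads(%ln + %ln)', oldheads, missing)]
# ---------------------------------------------------------------------------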
241 def _nowarnheads(repo, remote, newbookmarks):
241 def _nowarnheads(pushop):
242 242 # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
243
244 # internal config: bookmarks.pushing
245 newbookmarks = pushop.ui.configlist('bookmarks', 'pushing')
246
247 repo = pushop.repo.unfiltered()
248 remote = pushop.remote
243 249 localbookmarks = repo._bookmarks
244 250 remotebookmarks = remote.listkeys('bookmarks')
245 251 bookmarkedheads = set()
246 252 for bm in localbookmarks:
247 253 rnode = remotebookmarks.get(bm)
248 254 if rnode and rnode in repo:
249 255 lctx, rctx = repo[bm], repo[rnode]
250 256 if bookmarks.validdest(repo, rctx, lctx):
251 257 bookmarkedheads.add(lctx.node())
252 258 else:
253 259 if bm in newbookmarks and bm not in remotebookmarks:
254 260 bookmarkedheads.add(repo[bm].node())
255 261
256 262 return bookmarkedheads
257 263
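# --- editorial sketch, not part of the changeset ----------------------------
# After this change the helper needs only the push operation object; the
# attributes it reads (ui, repo, remote) are visible above. A fake operation
# object like the one below would be enough to exercise it in isolation.
# FakePushOp is purely illustrative, not a Mercurial class.

class FakePushOp(object):
    def __init__(self, ui, repo, remote):
        self.ui = ui
        self.repo = repo
        self.remote = remote

def sketch_nowarnheads(ui, repo, remote):
    return _nowarnheads(FakePushOp(ui, repo, remote))
# ---------------------------------------------------------------------------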
258 264 def checkheads(pushop):
259 265 """Check that a push won't add any outgoing head
260 266
261 267 raise Abort error and display ui message as needed.
262 268 """
263 269
264 270 repo = pushop.repo.unfiltered()
265 271 remote = pushop.remote
266 272 outgoing = pushop.outgoing
267 273 remoteheads = pushop.remoteheads
268 274 newbranch = pushop.newbranch
269 275 inc = bool(pushop.incoming)
270 276
271 # internal config: bookmarks.pushing
272 newbookmarks = pushop.ui.configlist('bookmarks', 'pushing')
273
274 277 # Check for each named branch if we're creating new remote heads.
275 278 # To be a remote head after push, node must be either:
276 279 # - unknown locally
277 280 # - a local outgoing head descended from update
278 281 # - a remote head that's known locally and not
279 282 # ancestral to an outgoing head
280 283 if remoteheads == [nullid]:
281 284 # remote is empty, nothing to check.
282 285 return
283 286
284 287 if remote.capable('branchmap'):
285 288 headssum = _headssummary(repo, remote, outgoing)
286 289 else:
287 290 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
288 291 newbranches = [branch for branch, heads in headssum.iteritems()
289 292 if heads[0] is None]
290 293 # 1. Check for new branches on the remote.
291 294 if newbranches and not newbranch: # new branch requires --new-branch
292 295 branchnames = ', '.join(sorted(newbranches))
293 296 raise error.Abort(_("push creates new remote branches: %s!")
294 297 % branchnames,
295 298 hint=_("use 'hg push --new-branch' to create"
296 299 " new remote branches"))
297 300
298 301 # 2. Find heads that we need not warn about
299 nowarnheads = _nowarnheads(repo, remote, newbookmarks)
302 nowarnheads = _nowarnheads(pushop)
300 303
301 304 # 3. Check for new heads.
302 305 # If there are more heads after the push than before, a suitable
303 306 # error message, depending on unsynced status, is displayed.
304 307 errormsg = None
305 308 # If there is no obsstore, allfuturecommon won't be used, so no
306 309 # need to compute it.
307 310 if repo.obsstore:
308 311 allmissing = set(outgoing.missing)
309 312 cctx = repo.set('%ld', outgoing.common)
310 313 allfuturecommon = set(c.node() for c in cctx)
311 314 allfuturecommon.update(allmissing)
312 315 for branch, heads in sorted(headssum.iteritems()):
313 316 remoteheads, newheads, unsyncedheads = heads
314 317 candidate_newhs = set(newheads)
315 318 # add unsynced data
316 319 if remoteheads is None:
317 320 oldhs = set()
318 321 else:
319 322 oldhs = set(remoteheads)
320 323 oldhs.update(unsyncedheads)
321 324 candidate_newhs.update(unsyncedheads)
322 325 dhs = None # delta heads, the new heads on branch
323 326 discardedheads = set()
324 327 if not repo.obsstore:
325 328 newhs = candidate_newhs
326 329 else:
327 330 # remove future heads which are actually obsoleted by another
328 331 # pushed element:
329 332 #
330 333 # XXX as above, There are several cases this code does not handle
331 334 # XXX properly
332 335 #
333 336 # (1) if <nh> is public, it won't be affected by obsolete marker
334 337 # and a new head is created
335 338 #
336 339 # (2) if the new heads have ancestors which are not obsolete and
337 340 # not ancestors of any other heads we will have a new head too.
338 341 #
339 342 # These two cases will be easy to handle for known changeset but
340 343 # much more tricky for unsynced changes.
341 344 #
342 345 # In addition, this code is confused by prune as it only looks for
343 346 # successors of the heads (none if pruned) leading to issue4354
344 347 newhs = set()
345 348 for nh in candidate_newhs:
346 349 if nh in repo and repo[nh].phase() <= phases.public:
347 350 newhs.add(nh)
348 351 else:
349 352 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
350 353 if suc != nh and suc in allfuturecommon:
351 354 discardedheads.add(nh)
352 355 break
353 356 else:
354 357 newhs.add(nh)
355 358 unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
356 359 if unsynced:
357 360 if None in unsynced:
358 361 # old remote, no heads data
359 362 heads = None
360 363 elif len(unsynced) <= 4 or repo.ui.verbose:
361 364 heads = ' '.join(short(h) for h in unsynced)
362 365 else:
363 366 heads = (' '.join(short(h) for h in unsynced[:4]) +
364 367 ' ' + _("and %s others") % (len(unsynced) - 4))
365 368 if heads is None:
366 369 repo.ui.status(_("remote has heads that are "
367 370 "not known locally\n"))
368 371 elif branch is None:
369 372 repo.ui.status(_("remote has heads that are "
370 373 "not known locally: %s\n") % heads)
371 374 else:
372 375 repo.ui.status(_("remote has heads on branch '%s' that are "
373 376 "not known locally: %s\n") % (branch, heads))
374 377 if remoteheads is None:
375 378 if len(newhs) > 1:
376 379 dhs = list(newhs)
377 380 if errormsg is None:
378 381 errormsg = (_("push creates new branch '%s' "
379 382 "with multiple heads") % (branch))
380 383 hint = _("merge or"
381 384 " see \"hg help push\" for details about"
382 385 " pushing new heads")
383 386 elif len(newhs) > len(oldhs):
384 387 # remove bookmarked or existing remote heads from the new heads list
385 388 dhs = sorted(newhs - nowarnheads - oldhs)
386 389 if dhs:
387 390 if errormsg is None:
388 391 if branch not in ('default', None):
389 392 errormsg = _("push creates new remote head %s "
390 393 "on branch '%s'!") % (short(dhs[0]), branch)
391 394 elif repo[dhs[0]].bookmarks():
392 395 errormsg = _("push creates new remote head %s "
393 396 "with bookmark '%s'!") % (
394 397 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
395 398 else:
396 399 errormsg = _("push creates new remote head %s!"
397 400 ) % short(dhs[0])
398 401 if unsyncedheads:
399 402 hint = _("pull and merge or"
400 403 " see \"hg help push\" for details about"
401 404 " pushing new heads")
402 405 else:
403 406 hint = _("merge or"
404 407 " see \"hg help push\" for details about"
405 408 " pushing new heads")
406 409 if branch is None:
407 410 repo.ui.note(_("new remote heads:\n"))
408 411 else:
409 412 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
410 413 for h in dhs:
411 414 repo.ui.note((" %s\n") % short(h))
412 415 if errormsg:
413 416 raise error.Abort(errormsg, hint=hint)
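
In the push code path the check above runs against the in-flight push operation before any changesets are sent; if it finds a problematic head it aborts with one of the messages constructed above. A rough sketch of a caller, assuming `pushop` is the operation object built by the exchange machinery (the real call sites live in mercurial/exchange.py and are simplified here):

from mercurial import discovery, error

def sketch_guarded_push(pushop):
    try:
        discovery.checkheads(pushop)
    except error.Abort as inst:
        pushop.ui.warn('push rejected: %s\n' % inst)
        raise
    # only reached when no unwanted new heads would be created;
    # the caller would go on to build and send the bundle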