##// END OF EJS Templates
discovery: use absolute_import
Gregory Szorc -
r25944:337d010f default
parent child Browse files
Show More
@@ -1,380 +1,393 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 from node import nullid, short
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
11 import branchmap
8 from __future__ import absolute_import
9
10 from .i18n import _
11 from .node import (
12 nullid,
13 short,
14 )
15
16 from . import (
17 bookmarks,
18 branchmap,
19 obsolete,
20 phases,
21 setdiscovery,
22 treediscovery,
23 util,
24 )
12 25
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    if not remote.capable('getbundle'):
        # old server without getbundle: fall back to tree-walking discovery
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        knownnode = repo.changelog.hasnode # no nodemap until it is filtered
        if all(knownnode(h) for h in heads):
            # every requested head already exists locally, so the requested
            # heads are themselves the common set and nothing is incoming
            return (heads, False, heads)

    common, anyinc, srvheads = setdiscovery.findcommonheads(
        repo.ui, repo, remote, abortwhenunrelated=not force)
    return (list(common), anyinc, heads or list(srvheads))
49 62
class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changeset that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, revlog, commonheads, missingheads):
        self.commonheads = commonheads
        self.missingheads = missingheads
        self.excluded = []
        # internal state backing the lazy 'common'/'missing' properties
        self._revlog = revlog
        self._common = None
        self._missing = None

    def _computecommonmissing(self):
        # derive the full common/missing node sets from the stored heads
        self._common, self._missing = self._revlog.findcommonmissing(
            self.commonheads, self.missingheads)

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing
89 102
def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo.changelog, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    og.commonheads, _any, _hds = commoninc

    # compute outgoing
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        # no secret phase roots and no obsstore: nothing can be excluded,
        # so everything missing is outgoing
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = repo.filtered("served").heads()
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        # fill _missing and excluded directly, bypassing the lazy
        # _computecommonmissing path on the outgoing object
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            # nothing was excluded, the supplied heads are still accurate
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads
    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og
149 162
def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. Create set of branches involved in the push.
    pushed = set(repo[n].branch() for n in outgoing.missing)
    remotemap = remote.branchmap()
    pushed.difference_update(pushed - set(remotemap))

    # A. register remote heads, split by local knowledge
    knownnode = cl.hasnode # do not use nodemap until it is filtered
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = [h for h in heads if knownnode(h)]
        unsynced = [h for h in heads if not knownnode(h)]
        headssum[branch] = (known, list(known), unsynced)
    # B. add new branch data
    missingctx = [repo[n] for n in outgoing.missing]
    touchedbranches = set(ctx.branch() for ctx in missingctx)
    for branch in touchedbranches:
        if branch not in headssum:
            headssum[branch] = (None, [], [])

    # C drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # D. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = branchmap.branchcache((branch, heads[1])
                                   for branch, heads in headssum.iteritems()
                                   if heads[0] is not None)
    newmap.update(repo, (ctx.rev() for ctx in missingctx))
    for branch, newheads in newmap.iteritems():
        # mutate the stored list in place so the tuple in headssum sees it
        headssum[branch][1][:] = newheads
    return headssum
204 217
def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
    """Compute branchmapsummary for repo without branchmap support"""

    # 1-4b. old servers: Check for new topological heads.
    # Construct {old,new}map with branch = None (topological branch).
    # (code based on update)
    knownnode = repo.changelog.hasnode # no nodemap until it is filtered
    oldheads = set(h for h in remoteheads if knownnode(h))
    # all nodes in outgoing.missing are children of either:
    # - an element of oldheads
    # - another element of outgoing.missing
    # - nullrev
    # This explains why the new head are very simple to compute.
    newheads = [c.node()
                for c in repo.set('heads(%ln + %ln)',
                                  oldheads, outgoing.missing)]
    # a None entry acts as a sentinel to issue the "unsynced changes" warning
    unsynced = set([None]) if inc else set()
    return {None: (oldheads, newheads, unsynced)}
226 239
def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
               newbookmarks=None):
    """Check that a push won't add any outgoing head

    raise Abort error and display ui message as needed.

    :repo: the local repository
    :remote: the remote peer being pushed to
    :outgoing: outgoing instance describing what would be pushed
    :remoteheads: heads reported by the remote
    :newbranch: allow the push to create new named branches
    :inc: True when unsynced remote changes are known to exist
    :newbookmarks: bookmarks being newly pushed; heads carrying one of them
                   are not reported as new heads
    """
    # fix for the mutable-default-argument pitfall: the old default was a
    # shared [] literal; None + guard is equivalent (newbookmarks is only
    # membership-tested below, never mutated) and safe.
    if newbookmarks is None:
        newbookmarks = []

    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    # branches whose remoteheads slot is None are new on the remote
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise util.Abort(_("push creates new remote branches: %s!")
                         % branchnames,
                         hint=_("use 'hg push --new-branch' to create"
                                " new remote branches"))

    # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
    localbookmarks = repo._bookmarks
    remotebookmarks = remote.listkeys('bookmarks')
    bookmarkedheads = set()
    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = repo[bm], repo[rnode]
            if bookmarks.validdest(repo, rctx, lctx):
                bookmarkedheads.add(lctx.node())
        else:
            if bm in newbookmarks:
                bookmarkedheads.add(repo[bm].node())

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    error = None
    # If there is no obsstore, allfuturecommon won't be used, so no
    # need to compute it.
    if repo.obsstore:
        allmissing = set(outgoing.missing)
        cctx = repo.set('%ld', outgoing.common)
        allfuturecommon = set(c.node() for c in cctx)
        allfuturecommon.update(allmissing)
    for branch, heads in sorted(headssum.iteritems()):
        remoteheads, newheads, unsyncedheads = heads
        candidate_newhs = set(newheads)
        # add unsynced data
        if remoteheads is None:
            oldhs = set()
        else:
            oldhs = set(remoteheads)
        oldhs.update(unsyncedheads)
        candidate_newhs.update(unsyncedheads)
        dhs = None # delta heads, the new heads on branch
        discardedheads = set()
        if repo.obsstore:
            # remove future heads which are actually obsoleted by another
            # pushed element:
            #
            # XXX as above, There are several cases this case does not handle
            # XXX properly
            #
            # (1) if <nh> is public, it won't be affected by obsolete marker
            #     and a new is created
            #
            # (2) if the new heads have ancestors which are not obsolete and
            #     not ancestors of any other heads we will have a new head too.
            #
            # These two cases will be easy to handle for known changeset but
            # much more tricky for unsynced changes.
            newhs = set()
            for nh in candidate_newhs:
                if nh in repo and repo[nh].phase() <= phases.public:
                    newhs.add(nh)
                else:
                    for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
                        if suc != nh and suc in allfuturecommon:
                            discardedheads.add(nh)
                            break
                    else:
                        newhs.add(nh)
        else:
            newhs = candidate_newhs
        unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
        if unsynced:
            if None in unsynced:
                # old remote, no heads data
                heads = None
            elif len(unsynced) <= 4 or repo.ui.verbose:
                heads = ' '.join(short(h) for h in unsynced)
            else:
                heads = (' '.join(short(h) for h in unsynced[:4]) +
                         ' ' + _("and %s others") % (len(unsynced) - 4))
            if heads is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally\n"))
            elif branch is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally: %s\n") % heads)
            else:
                repo.ui.status(_("remote has heads on branch '%s' that are "
                                 "not known locally: %s\n") % (branch, heads))
        if remoteheads is None:
            if len(newhs) > 1:
                dhs = list(newhs)
                if error is None:
                    error = (_("push creates new branch '%s' "
                               "with multiple heads") % (branch))
                    hint = _("merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
        elif len(newhs) > len(oldhs):
            # remove bookmarked or existing remote heads from the new heads list
            dhs = sorted(newhs - bookmarkedheads - oldhs)
        if dhs:
            if error is None:
                if branch not in ('default', None):
                    error = _("push creates new remote head %s "
                              "on branch '%s'!") % (short(dhs[0]), branch)
                elif repo[dhs[0]].bookmarks():
                    error = _("push creates new remote head %s "
                              "with bookmark '%s'!") % (
                              short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                else:
                    error = _("push creates new remote head %s!"
                              ) % short(dhs[0])
                if unsyncedheads:
                    hint = _("pull and merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
                else:
                    hint = _("merge or"
                             " see \"hg help push\" for details about"
                             " pushing new heads")
            if branch is None:
                repo.ui.note(_("new remote heads:\n"))
            else:
                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
            for h in dhs:
                repo.ui.note((" %s\n") % short(h))
    if error:
        raise util.Abort(error, hint=hint)
General Comments 0
You need to be logged in to leave comments. Login now