##// END OF EJS Templates
discovery: stop using nodemap for membership testing...
Pierre-Yves David -
r20225:d2704c48 default
parent child Browse files
Show More
@@ -1,355 +1,356
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
11 11 import branchmap
12 12
13 13 def findcommonincoming(repo, remote, heads=None, force=False):
14 14 """Return a tuple (common, anyincoming, heads) used to identify the common
15 15 subset of nodes between repo and remote.
16 16
17 17 "common" is a list of (at least) the heads of the common subset.
18 18 "anyincoming" is testable as a boolean indicating if any nodes are missing
19 19 locally. If remote does not support getbundle, this actually is a list of
20 20 roots of the nodes that would be incoming, to be supplied to
21 21 changegroupsubset. No code except for pull should be relying on this fact
22 22 any longer.
23 23 "heads" is either the supplied heads, or else the remote's heads.
24 24
25 25 If you pass heads and they are all known locally, the response lists just
26 26 these heads in "common" and in "heads".
27 27
28 28 Please use findcommonoutgoing to compute the set of outgoing nodes to give
29 29 extensions a good hook into outgoing.
30 30 """
31 31
32 32 if not remote.capable('getbundle'):
33 33 return treediscovery.findcommonincoming(repo, remote, heads, force)
34 34
35 35 if heads:
36 36 allknown = True
37 nm = repo.changelog.nodemap
37 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
38 38 for h in heads:
39 if nm.get(h) is None:
39 if not knownnode(h):
40 40 allknown = False
41 41 break
42 42 if allknown:
43 43 return (heads, False, heads)
44 44
45 45 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
46 46 abortwhenunrelated=not force)
47 47 common, anyinc, srvheads = res
48 48 return (list(common), anyinc, heads or list(srvheads))
49 49
50 50 class outgoing(object):
51 51 '''Represents the set of nodes present in a local repo but not in a
52 52 (possibly) remote one.
53 53
54 54 Members:
55 55
56 56 missing is a list of all nodes present in local but not in remote.
57 57 common is a list of all nodes shared between the two repos.
58 58 excluded is the list of missing changeset that shouldn't be sent remotely.
59 59 missingheads is the list of heads of missing.
60 60 commonheads is the list of heads of common.
61 61
62 62 The sets are computed on demand from the heads, unless provided upfront
63 63 by discovery.'''
64 64
65 65 def __init__(self, revlog, commonheads, missingheads):
66 66 self.commonheads = commonheads
67 67 self.missingheads = missingheads
68 68 self._revlog = revlog
69 69 self._common = None
70 70 self._missing = None
71 71 self.excluded = []
72 72
73 73 def _computecommonmissing(self):
74 74 sets = self._revlog.findcommonmissing(self.commonheads,
75 75 self.missingheads)
76 76 self._common, self._missing = sets
77 77
78 78 @util.propertycache
79 79 def common(self):
80 80 if self._common is None:
81 81 self._computecommonmissing()
82 82 return self._common
83 83
84 84 @util.propertycache
85 85 def missing(self):
86 86 if self._missing is None:
87 87 self._computecommonmissing()
88 88 return self._missing
89 89
90 90 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
91 91 commoninc=None, portable=False):
92 92 '''Return an outgoing instance to identify the nodes present in repo but
93 93 not in other.
94 94
95 95 If onlyheads is given, only nodes ancestral to nodes in onlyheads
96 96 (inclusive) are included. If you already know the local repo's heads,
97 97 passing them in onlyheads is faster than letting them be recomputed here.
98 98
99 99 If commoninc is given, it must be the result of a prior call to
100 100 findcommonincoming(repo, other, force) to avoid recomputing it here.
101 101
102 102 If portable is given, compute more conservative common and missingheads,
103 103 to make bundles created from the instance more portable.'''
104 104 # declare an empty outgoing object to be filled later
105 105 og = outgoing(repo.changelog, None, None)
106 106
107 107 # get common set if not provided
108 108 if commoninc is None:
109 109 commoninc = findcommonincoming(repo, other, force=force)
110 110 og.commonheads, _any, _hds = commoninc
111 111
112 112 # compute outgoing
113 113 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
114 114 if not mayexclude:
115 115 og.missingheads = onlyheads or repo.heads()
116 116 elif onlyheads is None:
117 117 # use visible heads as it should be cached
118 118 og.missingheads = repo.filtered("served").heads()
119 119 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
120 120 else:
121 121 # compute common, missing and exclude secret stuff
122 122 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
123 123 og._common, allmissing = sets
124 124 og._missing = missing = []
125 125 og.excluded = excluded = []
126 126 for node in allmissing:
127 127 ctx = repo[node]
128 128 if ctx.phase() >= phases.secret or ctx.extinct():
129 129 excluded.append(node)
130 130 else:
131 131 missing.append(node)
132 132 if len(missing) == len(allmissing):
133 133 missingheads = onlyheads
134 134 else: # update missing heads
135 135 missingheads = phases.newheads(repo, onlyheads, excluded)
136 136 og.missingheads = missingheads
137 137 if portable:
138 138 # recompute common and missingheads as if -r<rev> had been given for
139 139 # each head of missing, and --base <rev> for each head of the proper
140 140 # ancestors of missing
141 141 og._computecommonmissing()
142 142 cl = repo.changelog
143 143 missingrevs = set(cl.rev(n) for n in og._missing)
144 144 og._common = set(cl.ancestors(missingrevs)) - missingrevs
145 145 commonheads = set(og.commonheads)
146 146 og.missingheads = [h for h in og.missingheads if h not in commonheads]
147 147
148 148 return og
149 149
150 150 def _headssummary(repo, remote, outgoing):
151 151 """compute a summary of branch and heads status before and after push
152 152
153 153 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
154 154
155 155 - branch: the branch name
156 156 - remoteheads: the list of remote heads known locally
157 157 None if the branch is new
158 158 - newheads: the new remote heads (known locally) with outgoing pushed
159 159 - unsyncedheads: the list of remote heads unknown locally.
160 160 """
161 161 cl = repo.changelog
162 162 headssum = {}
163 163 # A. Create set of branches involved in the push.
164 164 branches = set(repo[n].branch() for n in outgoing.missing)
165 165 remotemap = remote.branchmap()
166 166 newbranches = branches - set(remotemap)
167 167 branches.difference_update(newbranches)
168 168
169 169 # A. register remote heads
170 170 remotebranches = set()
171 171 for branch, heads in remote.branchmap().iteritems():
172 172 remotebranches.add(branch)
173 173 known = []
174 174 unsynced = []
175 knownnode = cl.hasnode # do not use nodemap until it is filtered
175 176 for h in heads:
176 if h in cl.nodemap:
177 if knownnode(h):
177 178 known.append(h)
178 179 else:
179 180 unsynced.append(h)
180 181 headssum[branch] = (known, list(known), unsynced)
181 182 # B. add new branch data
182 183 missingctx = list(repo[n] for n in outgoing.missing)
183 184 touchedbranches = set()
184 185 for ctx in missingctx:
185 186 branch = ctx.branch()
186 187 touchedbranches.add(branch)
187 188 if branch not in headssum:
188 189 headssum[branch] = (None, [], [])
189 190
190 191 # C drop data about untouched branches:
191 192 for branch in remotebranches - touchedbranches:
192 193 del headssum[branch]
193 194
194 195 # D. Update newmap with outgoing changes.
195 196 # This will possibly add new heads and remove existing ones.
196 197 newmap = branchmap.branchcache((branch, heads[1])
197 198 for branch, heads in headssum.iteritems()
198 199 if heads[0] is not None)
199 200 newmap.update(repo, (ctx.rev() for ctx in missingctx))
200 201 for branch, newheads in newmap.iteritems():
201 202 headssum[branch][1][:] = newheads
202 203 return headssum
203 204
204 205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
205 206 """Compute branchmapsummary for repo without branchmap support"""
206 207
207 cl = repo.changelog
208 208 # 1-4b. old servers: Check for new topological heads.
209 209 # Construct {old,new}map with branch = None (topological branch).
210 210 # (code based on update)
211 oldheads = set(h for h in remoteheads if h in cl.nodemap)
211 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
212 oldheads = set(h for h in remoteheads if knownnode(h))
212 213 # all nodes in outgoing.missing are children of either:
213 214 # - an element of oldheads
214 215 # - another element of outgoing.missing
215 216 # - nullrev
216 217 # This explains why the new heads are very simple to compute.
217 218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
218 219 newheads = list(c.node() for c in r)
219 220 unsynced = inc and set([None]) or set()
220 221 return {None: (oldheads, newheads, unsynced)}
221 222
222 223 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
223 224 newbookmarks=[]):
224 225 """Check that a push won't add any outgoing head
225 226
226 227 raise Abort error and display ui message as needed.
227 228 """
228 229 # Check for each named branch if we're creating new remote heads.
229 230 # To be a remote head after push, node must be either:
230 231 # - unknown locally
231 232 # - a local outgoing head descended from update
232 233 # - a remote head that's known locally and not
233 234 # ancestral to an outgoing head
234 235 if remoteheads == [nullid]:
235 236 # remote is empty, nothing to check.
236 237 return
237 238
238 239 if remote.capable('branchmap'):
239 240 headssum = _headssummary(repo, remote, outgoing)
240 241 else:
241 242 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
242 243 newbranches = [branch for branch, heads in headssum.iteritems()
243 244 if heads[0] is None]
244 245 # 1. Check for new branches on the remote.
245 246 if newbranches and not newbranch: # new branch requires --new-branch
246 247 branchnames = ', '.join(sorted(newbranches))
247 248 raise util.Abort(_("push creates new remote branches: %s!")
248 249 % branchnames,
249 250 hint=_("use 'hg push --new-branch' to create"
250 251 " new remote branches"))
251 252
252 253 # 2 compute newly pushed bookmarks. We
253 254 # don't warn about bookmarked heads.
254 255 localbookmarks = repo._bookmarks
255 256 remotebookmarks = remote.listkeys('bookmarks')
256 257 bookmarkedheads = set()
257 258 for bm in localbookmarks:
258 259 rnode = remotebookmarks.get(bm)
259 260 if rnode and rnode in repo:
260 261 lctx, rctx = repo[bm], repo[rnode]
261 262 if bookmarks.validdest(repo, rctx, lctx):
262 263 bookmarkedheads.add(lctx.node())
263 264 else:
264 265 if bm in newbookmarks:
265 266 bookmarkedheads.add(repo[bm].node())
266 267
267 268 # 3. Check for new heads.
268 269 # If there are more heads after the push than before, a suitable
269 270 # error message, depending on unsynced status, is displayed.
270 271 error = None
271 272 unsynced = False
272 273 allmissing = set(outgoing.missing)
273 274 allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
274 275 allfuturecommon.update(allmissing)
275 276 for branch, heads in sorted(headssum.iteritems()):
276 277 candidate_newhs = set(heads[1])
277 278 # add unsynced data
278 279 if heads[0] is None:
279 280 oldhs = set()
280 281 else:
281 282 oldhs = set(heads[0])
282 283 oldhs.update(heads[2])
283 284 candidate_newhs.update(heads[2])
284 285 dhs = None
285 286 discardedheads = set()
286 287 if repo.obsstore:
287 288 # remove future heads which are actually obsolete by another
288 289 # pushed element:
289 290 #
290 291 # XXX as above, there are several cases this case does not handle
291 292 # XXX properly
292 293 #
293 294 # (1) if <nh> is public, it won't be affected by obsolete marker
294 295 # and a new head is created
295 296 #
296 297 # (2) if the new heads have ancestors which are not obsolete and
297 298 # not ancestors of any other heads we will have a new head too.
298 299 #
299 300 # These two cases will be easy to handle for known changesets but much
300 301 # more tricky for unsynced changes.
301 302 newhs = set()
302 303 for nh in candidate_newhs:
303 304 if nh in repo and repo[nh].phase() <= phases.public:
304 305 newhs.add(nh)
305 306 else:
306 307 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
307 308 if suc != nh and suc in allfuturecommon:
308 309 discardedheads.add(nh)
309 310 break
310 311 else:
311 312 newhs.add(nh)
312 313 else:
313 314 newhs = candidate_newhs
314 315 if [h for h in heads[2] if h not in discardedheads]:
315 316 unsynced = True
316 317 if heads[0] is None:
317 318 if 1 < len(newhs):
318 319 dhs = list(newhs)
319 320 if error is None:
320 321 error = (_("push creates new branch '%s' "
321 322 "with multiple heads") % (branch))
322 323 hint = _("merge or"
323 324 " see \"hg help push\" for details about"
324 325 " pushing new heads")
325 326 elif len(newhs) > len(oldhs):
326 327 # strip updates to existing remote heads from the new heads list
327 328 dhs = sorted(newhs - bookmarkedheads - oldhs)
328 329 if dhs:
329 330 if error is None:
330 331 if branch not in ('default', None):
331 332 error = _("push creates new remote head %s "
332 333 "on branch '%s'!") % (short(dhs[0]), branch)
333 334 else:
334 335 error = _("push creates new remote head %s!"
335 336 ) % short(dhs[0])
336 337 if heads[2]: # unsynced
337 338 hint = _("pull and merge or"
338 339 " see \"hg help push\" for details about"
339 340 " pushing new heads")
340 341 else:
341 342 hint = _("merge or"
342 343 " see \"hg help push\" for details about"
343 344 " pushing new heads")
344 345 if branch is None:
345 346 repo.ui.note(_("new remote heads:\n"))
346 347 else:
347 348 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
348 349 for h in dhs:
349 350 repo.ui.note((" %s\n") % short(h))
350 351 if error:
351 352 raise util.Abort(error, hint=hint)
352 353
353 354 # 6. Check for unsynced changes on involved branches.
354 355 if unsynced:
355 356 repo.ui.warn(_("note: unsynced remote changes!\n"))
@@ -1,150 +1,150
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, error
11 11
12 12 def findcommonincoming(repo, remote, heads=None, force=False):
13 13 """Return a tuple (common, fetch, heads) used to identify the common
14 14 subset of nodes between repo and remote.
15 15
16 16 "common" is a list of (at least) the heads of the common subset.
17 17 "fetch" is a list of roots of the nodes that would be incoming, to be
18 18 supplied to changegroupsubset.
19 19 "heads" is either the supplied heads, or else the remote's heads.
20 20 """
21 21
22 m = repo.changelog.nodemap
22 knownnode = repo.changelog.hasnode
23 23 search = []
24 24 fetch = set()
25 25 seen = set()
26 26 seenbranch = set()
27 27 base = set()
28 28
29 29 if not heads:
30 30 heads = remote.heads()
31 31
32 32 if repo.changelog.tip() == nullid:
33 33 base.add(nullid)
34 34 if heads != [nullid]:
35 35 return [nullid], [nullid], list(heads)
36 36 return [nullid], [], heads
37 37
38 38 # assume we're closer to the tip than the root
39 39 # and start by examining the heads
40 40 repo.ui.status(_("searching for changes\n"))
41 41
42 42 unknown = []
43 43 for h in heads:
44 if h not in m:
44 if not knownnode(h):
45 45 unknown.append(h)
46 46 else:
47 47 base.add(h)
48 48
49 49 if not unknown:
50 50 return list(base), [], list(heads)
51 51
52 52 req = set(unknown)
53 53 reqcnt = 0
54 54
55 55 # search through remote branches
56 56 # a 'branch' here is a linear segment of history, with four parts:
57 57 # head, root, first parent, second parent
58 58 # (a branch always has two parents (or none) by definition)
59 59 unknown = util.deque(remote.branches(unknown))
60 60 while unknown:
61 61 r = []
62 62 while unknown:
63 63 n = unknown.popleft()
64 64 if n[0] in seen:
65 65 continue
66 66
67 67 repo.ui.debug("examining %s:%s\n"
68 68 % (short(n[0]), short(n[1])))
69 69 if n[0] == nullid: # found the end of the branch
70 70 pass
71 71 elif n in seenbranch:
72 72 repo.ui.debug("branch already found\n")
73 73 continue
74 elif n[1] and n[1] in m: # do we know the base?
74 elif n[1] and knownnode(n[1]): # do we know the base?
75 75 repo.ui.debug("found incomplete branch %s:%s\n"
76 76 % (short(n[0]), short(n[1])))
77 77 search.append(n[0:2]) # schedule branch range for scanning
78 78 seenbranch.add(n)
79 79 else:
80 80 if n[1] not in seen and n[1] not in fetch:
81 if n[2] in m and n[3] in m:
81 if knownnode(n[2]) and knownnode(n[3]):
82 82 repo.ui.debug("found new changeset %s\n" %
83 83 short(n[1]))
84 84 fetch.add(n[1]) # earliest unknown
85 85 for p in n[2:4]:
86 if p in m:
86 if knownnode(p):
87 87 base.add(p) # latest known
88 88
89 89 for p in n[2:4]:
90 if p not in req and p not in m:
90 if p not in req and not knownnode(p):
91 91 r.append(p)
92 92 req.add(p)
93 93 seen.add(n[0])
94 94
95 95 if r:
96 96 reqcnt += 1
97 97 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
98 98 repo.ui.debug("request %d: %s\n" %
99 99 (reqcnt, " ".join(map(short, r))))
100 100 for p in xrange(0, len(r), 10):
101 101 for b in remote.branches(r[p:p + 10]):
102 102 repo.ui.debug("received %s:%s\n" %
103 103 (short(b[0]), short(b[1])))
104 104 unknown.append(b)
105 105
106 106 # do binary search on the branches we found
107 107 while search:
108 108 newsearch = []
109 109 reqcnt += 1
110 110 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
111 111 for n, l in zip(search, remote.between(search)):
112 112 l.append(n[1])
113 113 p = n[0]
114 114 f = 1
115 115 for i in l:
116 116 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
117 if i in m:
117 if knownnode(i):
118 118 if f <= 2:
119 119 repo.ui.debug("found new branch changeset %s\n" %
120 120 short(p))
121 121 fetch.add(p)
122 122 base.add(i)
123 123 else:
124 124 repo.ui.debug("narrowed branch search to %s:%s\n"
125 125 % (short(p), short(i)))
126 126 newsearch.append((p, i))
127 127 break
128 128 p, f = i, f * 2
129 129 search = newsearch
130 130
131 131 # sanity check our fetch list
132 132 for f in fetch:
133 if f in m:
133 if knownnode(f):
134 134 raise error.RepoError(_("already have changeset ")
135 135 + short(f[:4]))
136 136
137 137 base = list(base)
138 138 if base == [nullid]:
139 139 if force:
140 140 repo.ui.warn(_("warning: repository is unrelated\n"))
141 141 else:
142 142 raise util.Abort(_("repository is unrelated"))
143 143
144 144 repo.ui.debug("found new changesets starting at " +
145 145 " ".join([short(f) for f in fetch]) + "\n")
146 146
147 147 repo.ui.progress(_('searching'), None)
148 148 repo.ui.debug("%d total queries\n" % reqcnt)
149 149
150 150 return base, list(fetch), heads
General Comments 0
You need to be logged in to leave comments. Login now