discovery: cleanup of variable names and comments
Mads Kiilerich
r20381:fff0a71f default
@@ -1,356 +1,356 @@
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
11 11 import branchmap
12 12
13 13 def findcommonincoming(repo, remote, heads=None, force=False):
14 14 """Return a tuple (common, anyincoming, heads) used to identify the common
15 15 subset of nodes between repo and remote.
16 16
17 17 "common" is a list of (at least) the heads of the common subset.
18 18 "anyincoming" is testable as a boolean indicating if any nodes are missing
19 19 locally. If remote does not support getbundle, this actually is a list of
20 20 roots of the nodes that would be incoming, to be supplied to
21 21 changegroupsubset. No code except for pull should be relying on this fact
22 22 any longer.
23 23 "heads" is either the supplied heads, or else the remote's heads.
24 24
25 25 If you pass heads and they are all known locally, the response lists just
26 26 these heads in "common" and in "heads".
27 27
28 28 Please use findcommonoutgoing to compute the set of outgoing nodes to give
29 29 extensions a good hook into outgoing.
30 30 """
31 31
32 32 if not remote.capable('getbundle'):
33 33 return treediscovery.findcommonincoming(repo, remote, heads, force)
34 34
35 35 if heads:
36 36 allknown = True
37 37 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
38 38 for h in heads:
39 39 if not knownnode(h):
40 40 allknown = False
41 41 break
42 42 if allknown:
43 43 return (heads, False, heads)
44 44
45 45 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
46 46 abortwhenunrelated=not force)
47 47 common, anyinc, srvheads = res
48 48 return (list(common), anyinc, heads or list(srvheads))
49 49
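# --- Illustrative sketch only (not part of this changeset) ---
# How a caller outside discovery.py might consume the (common, anyincoming,
# heads) tuple documented above.  The repository path and remote URL are
# hypothetical placeholders.
from mercurial import ui as uimod, hg, discovery

myui = uimod.ui()
repo = hg.repository(myui, '.')                       # local repository
remote = hg.peer(myui, {}, 'http://example.com/hg')   # remote peer

common, anyinc, heads = discovery.findcommonincoming(repo, remote)
if not anyinc:
    myui.status('nothing incoming; %d common heads\n' % len(common))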
50 50 class outgoing(object):
51 51 '''Represents the set of nodes present in a local repo but not in a
52 52 (possibly) remote one.
53 53
54 54 Members:
55 55
56 56 missing is a list of all nodes present in local but not in remote.
57 57 common is a list of all nodes shared between the two repos.
58 58 excluded is the list of missing changesets that shouldn't be sent remotely.
59 59 missingheads is the list of heads of missing.
60 60 commonheads is the list of heads of common.
61 61
62 62 The sets are computed on demand from the heads, unless provided upfront
63 63 by discovery.'''
64 64
65 65 def __init__(self, revlog, commonheads, missingheads):
66 66 self.commonheads = commonheads
67 67 self.missingheads = missingheads
68 68 self._revlog = revlog
69 69 self._common = None
70 70 self._missing = None
71 71 self.excluded = []
72 72
73 73 def _computecommonmissing(self):
74 74 sets = self._revlog.findcommonmissing(self.commonheads,
75 75 self.missingheads)
76 76 self._common, self._missing = sets
77 77
78 78 @util.propertycache
79 79 def common(self):
80 80 if self._common is None:
81 81 self._computecommonmissing()
82 82 return self._common
83 83
84 84 @util.propertycache
85 85 def missing(self):
86 86 if self._missing is None:
87 87 self._computecommonmissing()
88 88 return self._missing
89 89
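# --- Illustrative sketch only (not part of this changeset) ---
# Building an outgoing instance by hand; the full common/missing node sets are
# computed lazily from the two lists of heads the first time they are read.
# Reuses repo and common from the previous sketch.
og = discovery.outgoing(repo.changelog, common, repo.heads())
print '%d nodes would be pushed, %d are shared' % (len(og.missing), len(og.common))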
90 90 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
91 91 commoninc=None, portable=False):
92 92 '''Return an outgoing instance to identify the nodes present in repo but
93 93 not in other.
94 94
95 95 If onlyheads is given, only nodes ancestral to nodes in onlyheads
96 96 (inclusive) are included. If you already know the local repo's heads,
97 97 passing them in onlyheads is faster than letting them be recomputed here.
98 98
99 99 If commoninc is given, it must be the result of a prior call to
100 100 findcommonincoming(repo, other, force) to avoid recomputing it here.
101 101
102 102 If portable is given, compute more conservative common and missingheads,
103 103 to make bundles created from the instance more portable.'''
104 104 # declare an empty outgoing object to be filled later
105 105 og = outgoing(repo.changelog, None, None)
106 106
107 107 # get common set if not provided
108 108 if commoninc is None:
109 109 commoninc = findcommonincoming(repo, other, force=force)
110 110 og.commonheads, _any, _hds = commoninc
111 111
112 112 # compute outgoing
113 113 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
114 114 if not mayexclude:
115 115 og.missingheads = onlyheads or repo.heads()
116 116 elif onlyheads is None:
117 117 # use visible heads as it should be cached
118 118 og.missingheads = repo.filtered("served").heads()
119 119 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
120 120 else:
121 121 # compute common, missing and exclude secret stuff
122 122 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
123 123 og._common, allmissing = sets
124 124 og._missing = missing = []
125 125 og.excluded = excluded = []
126 126 for node in allmissing:
127 127 ctx = repo[node]
128 128 if ctx.phase() >= phases.secret or ctx.extinct():
129 129 excluded.append(node)
130 130 else:
131 131 missing.append(node)
132 132 if len(missing) == len(allmissing):
133 133 missingheads = onlyheads
134 134 else: # update missing heads
135 135 missingheads = phases.newheads(repo, onlyheads, excluded)
136 136 og.missingheads = missingheads
137 137 if portable:
138 138 # recompute common and missingheads as if -r<rev> had been given for
139 139 # each head of missing, and --base <rev> for each head of the proper
140 140 # ancestors of missing
141 141 og._computecommonmissing()
142 142 cl = repo.changelog
143 143 missingrevs = set(cl.rev(n) for n in og._missing)
144 144 og._common = set(cl.ancestors(missingrevs)) - missingrevs
145 145 commonheads = set(og.commonheads)
146 146 og.missingheads = [h for h in og.missingheads if h not in commonheads]
147 147
148 148 return og
149 149
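# --- Illustrative sketch only (not part of this changeset) ---
# Reusing an already computed commoninc tuple so findcommonoutgoing() does not
# rediscover the common set (repo, remote and myui as in the first sketch).
commoninc = discovery.findcommonincoming(repo, remote)
out = discovery.findcommonoutgoing(repo, remote, commoninc=commoninc)
for node in out.missing:
    myui.note('would push %s\n' % repo[node].hex())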
150 150 def _headssummary(repo, remote, outgoing):
151 151 """compute a summary of branch and heads status before and after push
152 152
153 153 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
154 154
155 155 - branch: the branch name
156 156 - remoteheads: the list of remote heads known locally
157 None is the branch is new
157 None if the branch is new
158 158 - newheads: the new remote heads (known locally) with outgoing pushed
159 159 - unsyncedheads: the list of remote heads unknown locally.
160 160 """
161 161 cl = repo.changelog
162 162 headssum = {}
163 163 # A. Create set of branches involved in the push.
164 164 branches = set(repo[n].branch() for n in outgoing.missing)
165 165 remotemap = remote.branchmap()
166 166 newbranches = branches - set(remotemap)
167 167 branches.difference_update(newbranches)
168 168
169 169 # A. register remote heads
170 170 remotebranches = set()
171 171 for branch, heads in remote.branchmap().iteritems():
172 172 remotebranches.add(branch)
173 173 known = []
174 174 unsynced = []
175 175 knownnode = cl.hasnode # do not use nodemap until it is filtered
176 176 for h in heads:
177 177 if knownnode(h):
178 178 known.append(h)
179 179 else:
180 180 unsynced.append(h)
181 181 headssum[branch] = (known, list(known), unsynced)
182 182 # B. add new branch data
183 183 missingctx = list(repo[n] for n in outgoing.missing)
184 184 touchedbranches = set()
185 185 for ctx in missingctx:
186 186 branch = ctx.branch()
187 187 touchedbranches.add(branch)
188 188 if branch not in headssum:
189 189 headssum[branch] = (None, [], [])
190 190
191 191 # C. Drop data about untouched branches:
192 192 for branch in remotebranches - touchedbranches:
193 193 del headssum[branch]
194 194
195 195 # D. Update newmap with outgoing changes.
196 196 # This will possibly add new heads and remove existing ones.
197 197 newmap = branchmap.branchcache((branch, heads[1])
198 198 for branch, heads in headssum.iteritems()
199 199 if heads[0] is not None)
200 200 newmap.update(repo, (ctx.rev() for ctx in missingctx))
201 201 for branch, newheads in newmap.iteritems():
202 202 headssum[branch][1][:] = newheads
203 203 return headssum
204 204
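# --- Illustrative sketch only (not part of this changeset) ---
# Shape of the mapping returned by _headssummary(), using fake binary node ids.
# The first slot is None when the branch does not exist on the remote yet.
n1, n2, n3, n4 = '\x11' * 20, '\x22' * 20, '\x33' * 20, '\x44' * 20
headssum = {
    'default': ([n1, n2], [n1, n3], []),  # (remoteheads, newheads, unsyncedheads)
    'feature': (None, [n4], []),          # new branch: no remote heads yet
}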
205 205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
206 206 """Compute branchmapsummary for repo without branchmap support"""
207 207
208 208 # 1-4b. old servers: Check for new topological heads.
209 209 # Construct {old,new}map with branch = None (topological branch).
210 210 # (code based on update)
211 211 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
212 212 oldheads = set(h for h in remoteheads if knownnode(h))
213 213 # all nodes in outgoing.missing are children of either:
214 214 # - an element of oldheads
215 215 # - another element of outgoing.missing
216 216 # - nullrev
217 217 # This explains why the new heads are very simple to compute.
218 218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
219 219 newheads = list(c.node() for c in r)
220 220 unsynced = inc and set([None]) or set()
221 221 return {None: (oldheads, newheads, unsynced)}
222 222
223 223 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
224 224 newbookmarks=[]):
225 225 """Check that a push won't add any outgoing head
226 226
227 227 raise an Abort error and display a ui message as needed.
228 228 """
229 229 # Check for each named branch if we're creating new remote heads.
230 230 # To be a remote head after push, node must be either:
231 231 # - unknown locally
232 232 # - a local outgoing head descended from update
233 233 # - a remote head that's known locally and not
234 234 # ancestral to an outgoing head
235 235 if remoteheads == [nullid]:
236 236 # remote is empty, nothing to check.
237 237 return
238 238
239 239 if remote.capable('branchmap'):
240 240 headssum = _headssummary(repo, remote, outgoing)
241 241 else:
242 242 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
243 243 newbranches = [branch for branch, heads in headssum.iteritems()
244 244 if heads[0] is None]
245 245 # 1. Check for new branches on the remote.
246 246 if newbranches and not newbranch: # new branch requires --new-branch
247 247 branchnames = ', '.join(sorted(newbranches))
248 248 raise util.Abort(_("push creates new remote branches: %s!")
249 249 % branchnames,
250 250 hint=_("use 'hg push --new-branch' to create"
251 251 " new remote branches"))
252 252
253 # 2 compute newly pushed bookmarks. We
254 # we don't warned about bookmarked heads.
253 # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
255 254 localbookmarks = repo._bookmarks
256 255 remotebookmarks = remote.listkeys('bookmarks')
257 256 bookmarkedheads = set()
258 257 for bm in localbookmarks:
259 258 rnode = remotebookmarks.get(bm)
260 259 if rnode and rnode in repo:
261 260 lctx, rctx = repo[bm], repo[rnode]
262 261 if bookmarks.validdest(repo, rctx, lctx):
263 262 bookmarkedheads.add(lctx.node())
264 263 else:
265 264 if bm in newbookmarks:
266 265 bookmarkedheads.add(repo[bm].node())
267 266
268 267 # 3. Check for new heads.
269 268 # If there are more heads after the push than before, a suitable
270 269 # error message, depending on unsynced status, is displayed.
271 270 error = None
272 271 unsynced = False
273 272 allmissing = set(outgoing.missing)
274 273 allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
275 274 allfuturecommon.update(allmissing)
276 275 for branch, heads in sorted(headssum.iteritems()):
277 candidate_newhs = set(heads[1])
276 remoteheads, newheads, unsyncedheads = heads
277 candidate_newhs = set(newheads)
278 278 # add unsynced data
279 if heads[0] is None:
279 if remoteheads is None:
280 280 oldhs = set()
281 281 else:
282 oldhs = set(heads[0])
283 oldhs.update(heads[2])
284 candidate_newhs.update(heads[2])
285 dhs = None
282 oldhs = set(remoteheads)
283 oldhs.update(unsyncedheads)
284 candidate_newhs.update(unsyncedheads)
285 dhs = None # delta heads, the new heads on branch
286 286 discardedheads = set()
287 287 if repo.obsstore:
288 # remove future heads which are actually obsolete by another
288 # remove future heads which are actually obsoleted by another
289 289 # pushed element:
290 290 #
291 291 # XXX as above, there are several cases this code does not handle
292 292 # XXX properly
293 293 #
294 294 # (1) if <nh> is public, it won't be affected by obsolete markers
295 295 # and a new head is created
296 296 #
297 297 # (2) if the new heads have ancestors which are not obsolete and
298 298 # not ancestors of any other heads, we will have a new head too.
299 299 #
300 # This two case will be easy to handle for know changeset but much
301 # more tricky for unsynced changes.
300 # These two cases will be easy to handle for known changesets but
301 # much more tricky for unsynced changes.
302 302 newhs = set()
303 303 for nh in candidate_newhs:
304 304 if nh in repo and repo[nh].phase() <= phases.public:
305 305 newhs.add(nh)
306 306 else:
307 307 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
308 308 if suc != nh and suc in allfuturecommon:
309 309 discardedheads.add(nh)
310 310 break
311 311 else:
312 312 newhs.add(nh)
313 313 else:
314 314 newhs = candidate_newhs
315 if [h for h in heads[2] if h not in discardedheads]:
315 if [h for h in unsyncedheads if h not in discardedheads]:
316 316 unsynced = True
317 if heads[0] is None:
318 if 1 < len(newhs):
317 if remoteheads is None:
318 if len(newhs) > 1:
319 319 dhs = list(newhs)
320 320 if error is None:
321 321 error = (_("push creates new branch '%s' "
322 322 "with multiple heads") % (branch))
323 323 hint = _("merge or"
324 324 " see \"hg help push\" for details about"
325 325 " pushing new heads")
326 326 elif len(newhs) > len(oldhs):
327 # strip updates to existing remote heads from the new heads list
327 # remove bookmarked or existing remote heads from the new heads list
328 328 dhs = sorted(newhs - bookmarkedheads - oldhs)
329 329 if dhs:
330 330 if error is None:
331 331 if branch not in ('default', None):
332 332 error = _("push creates new remote head %s "
333 333 "on branch '%s'!") % (short(dhs[0]), branch)
334 334 else:
335 335 error = _("push creates new remote head %s!"
336 336 ) % short(dhs[0])
337 if heads[2]: # unsynced
337 if unsyncedheads:
338 338 hint = _("pull and merge or"
339 339 " see \"hg help push\" for details about"
340 340 " pushing new heads")
341 341 else:
342 342 hint = _("merge or"
343 343 " see \"hg help push\" for details about"
344 344 " pushing new heads")
345 345 if branch is None:
346 346 repo.ui.note(_("new remote heads:\n"))
347 347 else:
348 348 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
349 349 for h in dhs:
350 350 repo.ui.note((" %s\n") % short(h))
351 351 if error:
352 352 raise util.Abort(error, hint=hint)
353 353
354 354 # 6. Check for unsynced changes on involved branches.
355 355 if unsynced:
356 356 repo.ui.warn(_("note: unsynced remote changes!\n"))
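# --- Illustrative sketch only (not part of this changeset) ---
# The push path calls checkheads() roughly like this before sending a bundle
# (repo, remote, out, commoninc and myui as in the sketches above).
from mercurial import util

remoteheads = remote.heads()
try:
    discovery.checkheads(repo, remote, out, remoteheads,
                         newbranch=False, inc=bool(commoninc[1]))
except util.Abort, inst:
    myui.warn('push rejected: %s\n' % inst)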