discovery: clarify comment
Benoit Boissinot
r11573:7ecf4a08 default
@@ -1,350 +1,352 @@
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, error
11 11
12 12 def findincoming(repo, remote, base=None, heads=None, force=False):
13 13 """Return list of roots of the subsets of missing nodes from remote
14 14
15 15 If base dict is specified, assume that these nodes and their parents
16 16 exist on the remote side and that no child of a node of base exists
17 17 in both remote and repo.
18 18 Furthermore base will be updated to include the nodes that exists
19 19 in repo and remote but no children exists in repo and remote.
20 20 If a list of heads is specified, return only nodes which are heads
21 21 or ancestors of these heads.
22 22
23 23 All the ancestors of base are in repo and in remote.
24 24 All the descendants of the list returned are missing in repo.
25 25 (and so we know that the rest of the nodes are missing in remote, see
26 26 outgoing)
27 27 """
28 28 return findcommonincoming(repo, remote, base, heads, force)[1]
29 29
30 30 def findcommonincoming(repo, remote, base=None, heads=None, force=False):
31 31 """Return a tuple (common, missing roots, heads) used to identify
32 32 missing nodes from remote.
33 33
34 34 If base dict is specified, assume that these nodes and their parents
35 35 exist on the remote side and that no child of a node of base exists
36 36 in both remote and repo.
37 37 Furthermore base will be updated to include the nodes that exists
38 in repo and remote but no children exists in repo and remote.
38 in repo and remote but no children exists in both repo and remote.
39 In other words, base is the set of heads of the DAG resulting from
40 the intersection of the nodes from repo and remote.
39 41 If a list of heads is specified, return only nodes which are heads
40 42 or ancestors of these heads.
41 43
42 44 All the ancestors of base are in repo and in remote.
43 45 """
44 46 m = repo.changelog.nodemap
45 47 search = []
46 48 fetch = set()
47 49 seen = set()
48 50 seenbranch = set()
49 51 if base is None:
50 52 base = {}
51 53
52 54 if not heads:
53 55 heads = remote.heads()
54 56
55 57 if repo.changelog.tip() == nullid:
56 58 base[nullid] = 1
57 59 if heads != [nullid]:
58 60 return [nullid], [nullid], list(heads)
59 61 return [nullid], [], []
60 62
61 63 # assume we're closer to the tip than the root
62 64 # and start by examining the heads
63 65 repo.ui.status(_("searching for changes\n"))
64 66
65 67 unknown = []
66 68 for h in heads:
67 69 if h not in m:
68 70 unknown.append(h)
69 71 else:
70 72 base[h] = 1
71 73
72 74 heads = unknown
73 75 if not unknown:
74 76 return base.keys(), [], []
75 77
76 78 req = set(unknown)
77 79 reqcnt = 0
78 80
79 81 # search through remote branches
80 82 # a 'branch' here is a linear segment of history, with four parts:
81 83 # head, root, first parent, second parent
82 84 # (a branch always has two parents (or none) by definition)
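# remote.branches() maps each requested node to such a quadruple, describing
# the linear segment of history that the node heads.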
83 85 unknown = remote.branches(unknown)
84 86 while unknown:
85 87 r = []
86 88 while unknown:
87 89 n = unknown.pop(0)
88 90 if n[0] in seen:
89 91 continue
90 92
91 93 repo.ui.debug("examining %s:%s\n"
92 94 % (short(n[0]), short(n[1])))
93 95 if n[0] == nullid: # found the end of the branch
94 96 pass
95 97 elif n in seenbranch:
96 98 repo.ui.debug("branch already found\n")
97 99 continue
98 100 elif n[1] and n[1] in m: # do we know the base?
99 101 repo.ui.debug("found incomplete branch %s:%s\n"
100 102 % (short(n[0]), short(n[1])))
101 103 search.append(n[0:2]) # schedule branch range for scanning
102 104 seenbranch.add(n)
103 105 else:
104 106 if n[1] not in seen and n[1] not in fetch:
105 107 if n[2] in m and n[3] in m:
106 108 repo.ui.debug("found new changeset %s\n" %
107 109 short(n[1]))
108 110 fetch.add(n[1]) # earliest unknown
109 111 for p in n[2:4]:
110 112 if p in m:
111 113 base[p] = 1 # latest known
112 114
113 115 for p in n[2:4]:
114 116 if p not in req and p not in m:
115 117 r.append(p)
116 118 req.add(p)
117 119 seen.add(n[0])
118 120
119 121 if r:
120 122 reqcnt += 1
121 123 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
122 124 repo.ui.debug("request %d: %s\n" %
123 125 (reqcnt, " ".join(map(short, r))))
124 126 for p in xrange(0, len(r), 10):
125 127 for b in remote.branches(r[p:p + 10]):
126 128 repo.ui.debug("received %s:%s\n" %
127 129 (short(b[0]), short(b[1])))
128 130 unknown.append(b)
129 131
130 132 # do binary search on the branches we found
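# remote.between() samples each (head, root) segment at exponentially
# increasing distances from the head; when a locally known sample is hit
# early (f <= 2), 'p' is the earliest unknown changeset of that segment,
# otherwise the range is narrowed to (p, i) for the next round.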
131 133 while search:
132 134 newsearch = []
133 135 reqcnt += 1
134 136 repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
135 137 for n, l in zip(search, remote.between(search)):
136 138 l.append(n[1])
137 139 p = n[0]
138 140 f = 1
139 141 for i in l:
140 142 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
141 143 if i in m:
142 144 if f <= 2:
143 145 repo.ui.debug("found new branch changeset %s\n" %
144 146 short(p))
145 147 fetch.add(p)
146 148 base[i] = 1
147 149 else:
148 150 repo.ui.debug("narrowed branch search to %s:%s\n"
149 151 % (short(p), short(i)))
150 152 newsearch.append((p, i))
151 153 break
152 154 p, f = i, f * 2
153 155 search = newsearch
154 156
155 157 # sanity check our fetch list
156 158 for f in fetch:
157 159 if f in m:
158 160 raise error.RepoError(_("already have changeset ")
159 161 + short(f[:4]))
160 162
161 163 if base.keys() == [nullid]:
162 164 if force:
163 165 repo.ui.warn(_("warning: repository is unrelated\n"))
164 166 else:
165 167 raise util.Abort(_("repository is unrelated"))
166 168
167 169 repo.ui.debug("found new changesets starting at " +
168 170 " ".join([short(f) for f in fetch]) + "\n")
169 171
170 172 repo.ui.progress(_('searching'), None)
171 173 repo.ui.debug("%d total queries\n" % reqcnt)
172 174
173 175 return base.keys(), list(fetch), heads
174 176
175 177 def findoutgoing(repo, remote, base=None, heads=None, force=False):
176 178 """Return list of nodes that are roots of subsets not in remote
177 179
178 180 If base dict is specified, assume that these nodes and their parents
179 181 exist on the remote side.
180 182 If a list of heads is specified, return only nodes which are heads
181 183 or ancestors of these heads, and return a second element which
182 184 contains all remote heads which get new children.
183 185 """
184 186 if base is None:
185 187 base = {}
186 188 findincoming(repo, remote, base, heads, force=force)
187 189
188 190 repo.ui.debug("common changesets up to "
189 191 + " ".join(map(short, base.keys())) + "\n")
190 192
191 193 remain = set(repo.changelog.nodemap)
192 194
193 195 # prune everything remote has from the tree
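# walk from the common nodes (base) through their ancestors, dropping from
# 'remain' everything the remote side is known to have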
194 196 remain.remove(nullid)
195 197 remove = base.keys()
196 198 while remove:
197 199 n = remove.pop(0)
198 200 if n in remain:
199 201 remain.remove(n)
200 202 for p in repo.changelog.parents(n):
201 203 remove.append(p)
202 204
203 205 # find every node whose parents have been pruned
204 206 subset = []
205 207 # find every remote head that will get new children
206 208 updated_heads = set()
207 209 for n in remain:
208 210 p1, p2 = repo.changelog.parents(n)
209 211 if p1 not in remain and p2 not in remain:
210 212 subset.append(n)
211 213 if heads:
212 214 if p1 in heads:
213 215 updated_heads.add(p1)
214 216 if p2 in heads:
215 217 updated_heads.add(p2)
216 218
217 219 # this is the set of all roots we have to push
218 220 if heads:
219 221 return subset, list(updated_heads)
220 222 else:
221 223 return subset
222 224
223 225 def prepush(repo, remote, force, revs, newbranch):
224 226 '''Analyze the local and remote repositories and determine which
225 227 changesets need to be pushed to the remote. Return value depends
226 228 on circumstances:
227 229
228 230 If we are not going to push anything, return a tuple (None,
229 231 outgoing) where outgoing is 0 if there are no outgoing
230 232 changesets and 1 if there are, but we refuse to push them
231 233 (e.g. would create new remote heads).
232 234
233 235 Otherwise, return a tuple (changegroup, remoteheads), where
234 236 changegroup is a readable file-like object whose read() returns
235 237 successive changegroup chunks ready to be sent over the wire and
236 238 remoteheads is the list of remote heads.'''
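# Outline: compute what is incoming and outgoing, then, unless force is set,
# refuse pushes that would add heads on the remote (using the remote's
# branchmap when available, falling back to a purely topological check on
# old servers), and finally build the changegroup to transmit.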
237 239 common = {}
238 240 remote_heads = remote.heads()
239 241 inc = findincoming(repo, remote, common, remote_heads, force=force)
240 242
241 243 cl = repo.changelog
242 244 update, updated_heads = findoutgoing(repo, remote, common, remote_heads)
243 245 outg, bases, heads = cl.nodesbetween(update, revs)
244 246
245 247 if not bases:
246 248 repo.ui.status(_("no changes found\n"))
247 249 return None, 1
248 250
249 251 if not force and remote_heads != [nullid]:
250 252
251 253 def fail_multiple_heads(unsynced, branch=None):
252 254 if branch:
253 255 msg = _("abort: push creates new remote heads"
254 256 " on branch '%s'!\n") % branch
255 257 else:
256 258 msg = _("abort: push creates new remote heads!\n")
257 259 repo.ui.warn(msg)
258 260 if unsynced:
259 261 repo.ui.status(_("(you should pull and merge or"
260 262 " use push -f to force)\n"))
261 263 else:
262 264 repo.ui.status(_("(did you forget to merge?"
263 265 " use push -f to force)\n"))
264 266 return None, 0
265 267
266 268 if remote.capable('branchmap'):
267 269 # Check for each named branch if we're creating new remote heads.
268 270 # To be a remote head after push, node must be either:
269 271 # - unknown locally
270 272 # - a local outgoing head descended from update
271 273 # - a remote head that's known locally and not
272 274 # ancestral to an outgoing head
273 275 #
274 276 # New named branches cannot be created without --force.
275 277
276 278 # 1. Create set of branches involved in the push.
277 279 branches = set(repo[n].branch() for n in outg)
278 280
279 281 # 2. Check for new branches on the remote.
280 282 remotemap = remote.branchmap()
281 283 newbranches = branches - set(remotemap)
282 284 if newbranches and not newbranch: # new branch requires --new-branch
283 285 branchnames = ', '.join(sorted(newbranches))
284 286 repo.ui.warn(_("abort: push creates "
285 287 "new remote branches: %s!\n")
286 288 % branchnames)
287 289 repo.ui.status(_("(use 'hg push --new-branch' to create new "
288 290 "remote branches)\n"))
289 291 return None, 0
290 292 branches.difference_update(newbranches)
291 293
292 294 # 3. Construct the initial oldmap and newmap dicts.
293 295 # They contain information about the remote heads before and
294 296 # after the push, respectively.
295 297 # Heads not found locally are not included in either dict,
296 298 # since they won't be affected by the push.
297 299 # unsynced contains all branches with incoming changesets.
298 300 oldmap = {}
299 301 newmap = {}
300 302 unsynced = set()
301 303 for branch in branches:
302 304 remoteheads = remotemap[branch]
303 305 prunedheads = [h for h in remoteheads if h in cl.nodemap]
304 306 oldmap[branch] = prunedheads
305 307 newmap[branch] = list(prunedheads)
306 308 if len(remoteheads) > len(prunedheads):
307 309 unsynced.add(branch)
308 310
309 311 # 4. Update newmap with outgoing changes.
310 312 # This will possibly add new heads and remove existing ones.
311 313 ctxgen = (repo[n] for n in outg)
312 314 repo._updatebranchcache(newmap, ctxgen)
313 315
314 316 # 5. Check for new heads.
315 317 # If there are more heads after the push than before, a suitable
316 318 # warning, depending on unsynced status, is displayed.
317 319 for branch in branches:
318 320 if len(newmap[branch]) > len(oldmap[branch]):
319 321 return fail_multiple_heads(branch in unsynced, branch)
320 322
321 323 # 6. Check for unsynced changes on involved branches.
322 324 if unsynced:
323 325 repo.ui.warn(_("note: unsynced remote changes!\n"))
324 326
325 327 else:
326 328 # Old servers: Check for new topological heads.
327 329 # Code based on _updatebranchcache.
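# prune from 'newheads' every node that is an ancestor of an outgoing
# changeset, so that only genuine topological heads remain before comparing
# the head count against 'oldheadcnt'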
328 330 newheads = set(h for h in remote_heads if h in cl.nodemap)
329 331 oldheadcnt = len(newheads)
330 332 newheads.update(outg)
331 333 if len(newheads) > 1:
332 334 for latest in reversed(outg):
333 335 if latest not in newheads:
334 336 continue
335 337 minhrev = min(cl.rev(h) for h in newheads)
336 338 reachable = cl.reachable(latest, cl.node(minhrev))
337 339 reachable.remove(latest)
338 340 newheads.difference_update(reachable)
339 341 if len(newheads) > oldheadcnt:
340 342 return fail_multiple_heads(inc)
341 343 if inc:
342 344 repo.ui.warn(_("note: unsynced remote changes!\n"))
343 345
344 346 if revs is None:
345 347 # use the fast path, no race possible on push
346 348 nodes = repo.changelog.findmissing(common.keys())
347 349 cg = repo._changegroup(nodes, 'push')
348 350 else:
349 351 cg = repo.changegroupsubset(update, revs, 'push')
350 352 return cg, remote_heads
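For orientation only (this is not part of the changeset above), the following minimal sketch shows how the discovery entry point could be exercised from the internal API of this era. The repository path and URL are placeholders, and obtaining the remote peer through hg.repository() is an assumption based on how the pull machinery instantiated peers at the time.

from mercurial import ui, hg, discovery

u = ui.ui()
# placeholder locations: any local repository and any reachable remote will do
repo = hg.repository(u, '/path/to/local/repo')
remote = hg.repository(u, 'http://hg.example.com/repo')

# common: heads of the history shared by both sides
# fetch:  roots of the changesets missing locally
# heads:  the remote heads that were examined
common, fetch, heads = discovery.findcommonincoming(repo, remote)
print('%d missing roots' % len(fetch))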