##// END OF EJS Templates
discovery: clarify comment
Benoit Boissinot -
r11573:7ecf4a08 default
parent child Browse files
Show More
@@ -1,350 +1,352
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, error
10 import util, error
11
11
def findincoming(repo, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    All the descendants of the list returned are missing in repo.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    # delegate the real work, then keep only the "missing roots" part
    # of the (common, missing roots, heads) tuple
    common, fetch, remoteheads = findcommonincoming(repo, remote, base,
                                                    heads, force)
    return fetch
29
29
def findcommonincoming(repo, remote, base=None, heads=None, force=False):
    """Return a tuple (common, missing roots, heads) used to identify
    missing nodes from remote.

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in both repo and remote.
    In other words, base is the set of heads of the DAG resulting from
    the intersection of the nodes from repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    """
    # nodemap acts as our "do we know this node locally?" membership test
    m = repo.changelog.nodemap
    search = []          # (head, root) ranges scheduled for binary search
    fetch = set()        # earliest locally-unknown nodes found so far
    seen = set()         # branch heads already examined
    seenbranch = set()   # full branch tuples already examined
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    # local repo is empty: everything on the remote is missing
    if repo.changelog.tip() == nullid:
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    # split remote heads into locally-unknown ones (to investigate) and
    # locally-known ones (immediately common, recorded in base)
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    heads = unknown
    if not unknown:
        # every remote head is already known locally; nothing is missing
        return base.keys(), [], []

    req = set(unknown)   # nodes whose branch info we have already requested
    reqcnt = 0           # number of round trips, for progress/debug output

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []  # parents to ask the remote about in the next round trip
        while unknown:
            n = unknown.pop(0)  # n == (head, root, p1, p2)
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and n[1] in m: # do we know the base?
                # the root is known but the head is not: the boundary lies
                # somewhere inside this linear segment -> binary search it
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # both parents known: the root itself is the
                        # earliest unknown node of this segment
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # queue any still-unknown parents for the next query batch
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            # ask in chunks of 10 to bound the size of each wire request
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        # remote.between returns sample nodes inside each (head, root) range
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1  # gap size doubles each step (exponential sampling)
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # the gap is down to at most one node: p is the
                        # first unknown changeset of this segment
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base[i] = 1
                    else:
                        # narrow the range and keep searching next round
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if f in m:
            # NOTE(review): short(f[:4]) truncates the binary node to 4
            # bytes before hexlifying; short(f) looks intended — confirm
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    # only the null node in common means the repos share no history
    if base.keys() == [nullid]:
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base.keys(), list(fetch), heads
174
176
def findoutgoing(repo, remote, base=None, heads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads, and return a second element which
    contains all remote heads which get new children.
    """
    if base is None:
        base = {}
    # populate base with the common nodes as a side effect
    findincoming(repo, remote, base, heads, force=force)

    repo.ui.debug("common changesets up to "
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every local node, then prune away what remote has
    remain = set(repo.changelog.nodemap)
    remain.remove(nullid)

    # breadth-first walk over the ancestors of the common set,
    # removing each one from 'remain'
    queue = base.keys()
    while queue:
        node = queue.pop(0)
        if node not in remain:
            continue
        remain.discard(node)
        queue.extend(repo.changelog.parents(node))

    # whatever survives the pruning is missing on the remote; its roots
    # are the nodes with no surviving parent
    roots = []
    # remote heads that will get new children from this push
    updated_heads = set()
    for node in remain:
        p1, p2 = repo.changelog.parents(node)
        if p1 in remain or p2 in remain:
            continue
        roots.append(node)
        if heads:
            updated_heads.update(p for p in (p1, p2) if p in heads)

    # this is the set of all roots we have to push
    if heads:
        return roots, list(updated_heads)
    return roots
222
224
def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing) where outgoing is 0 if there are no outgoing
    changesets and 1 if there are, but we refuse to push them
    (e.g. would create new remote heads).

    Otherwise, return a tuple (changegroup, remoteheads), where
    changegroup is a readable file-like object whose read() returns
    successive changegroup chunks ready to be sent over the wire and
    remoteheads is the list of remote heads.'''
    # common is filled in by findincoming as a side effect; inc is
    # truthy when the remote has changesets we don't have locally
    common = {}
    remote_heads = remote.heads()
    inc = findincoming(repo, remote, common, remote_heads, force=force)

    cl = repo.changelog
    update, updated_heads = findoutgoing(repo, remote, common, remote_heads)
    outg, bases, heads = cl.nodesbetween(update, revs)

    if not bases:
        # nothing outgoing at all
        repo.ui.status(_("no changes found\n"))
        return None, 1

    # new-head checks are skipped with --force or when pushing into an
    # empty remote (whose only head is nullid)
    if not force and remote_heads != [nullid]:

        def fail_multiple_heads(unsynced, branch=None):
            # Warn about new remote heads and hint at the likely fix;
            # returns the (None, 0) "refused to push" result.
            if branch:
                msg = _("abort: push creates new remote heads"
                        " on branch '%s'!\n") % branch
            else:
                msg = _("abort: push creates new remote heads!\n")
            repo.ui.warn(msg)
            if unsynced:
                repo.ui.status(_("(you should pull and merge or"
                                 " use push -f to force)\n"))
            else:
                repo.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
            return None, 0

        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join(sorted(newbranches))
                repo.ui.warn(_("abort: push creates "
                               "new remote branches: %s!\n")
                             % branchnames)
                repo.ui.status(_("(use 'hg push --new-branch' to create new "
                                 "remote branches)\n"))
                return None, 0
            # only the branches that already exist remotely need head checks
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remoteheads = remotemap[branch]
                prunedheads = [h for h in remoteheads if h in cl.nodemap]
                oldmap[branch] = prunedheads
                newmap[branch] = list(prunedheads)
                if len(remoteheads) > len(prunedheads):
                    # some remote heads are unknown locally: branch has
                    # incoming changes we haven't pulled
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

            # 5. Check for new heads.
            # If there are more heads after the push than before, a suitable
            # warning, depending on unsynced status, is displayed.
            for branch in branches:
                if len(newmap[branch]) > len(oldmap[branch]):
                    return fail_multiple_heads(branch in unsynced, branch)

            # 6. Check for unsynced changes on involved branches.
            if unsynced:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

        else:
            # Old servers: Check for new topological heads.
            # Code based on _updatebranchcache.
            newheads = set(h for h in remote_heads if h in cl.nodemap)
            oldheadcnt = len(newheads)
            newheads.update(outg)
            if len(newheads) > 1:
                # prune nodes that are ancestors of a later outgoing head,
                # so only genuine topological heads remain
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            if len(newheads) > oldheadcnt:
                return fail_multiple_heads(inc)
            if inc:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        nodes = repo.changelog.findmissing(common.keys())
        cg = repo._changegroup(nodes, 'push')
    else:
        cg = repo.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
General Comments 0
You need to be logged in to leave comments. Login now