discovery: simplify findoutgoing(), the updated_head stuff wasn't used
Benoit Boissinot
r11576:98c874a9 default
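
In caller terms, the simplification looks like this (a small sketch using the names from prepush() in the diff below; the "before" line is the call as it stood prior to this changeset):

    # before: when heads was passed, findoutgoing() returned a pair, but the
    # second element (updated_heads) was never used by prepush()
    update, updated_heads = findoutgoing(repo, remote, common, remote_heads)

    # after: the parameter is renamed to remoteheads and a single list of
    # roots to push is always returned
    update = findoutgoing(repo, remote, common, remote_heads)
    outg, bases, heads = cl.nodesbetween(update, revs)
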
@@ -1,348 +1,337 @@
 # discovery.py - protocol changeset discovery functions
 #
 # Copyright 2010 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from node import nullid, short
 from i18n import _
 import util, error
 
 def findincoming(repo, remote, base=None, heads=None, force=False):
     """Return list of roots of the subsets of missing nodes from remote
 
     If base dict is specified, assume that these nodes and their parents
     exist on the remote side and that no child of a node of base exists
     in both remote and repo.
     Furthermore base will be updated to include the nodes that exists
     in repo and remote but no children exists in repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.
 
     All the ancestors of base are in repo and in remote.
     All the descendants of the list returned are missing in repo.
     (and so we know that the rest of the nodes are missing in remote, see
     outgoing)
     """
     return findcommonincoming(repo, remote, base, heads, force)[1]
 
 def findcommonincoming(repo, remote, base=None, heads=None, force=False):
     """Return a tuple (common, missing roots, heads) used to identify
     missing nodes from remote.
 
     If base dict is specified, assume that these nodes and their parents
     exist on the remote side and that no child of a node of base exists
     in both remote and repo.
     Furthermore base will be updated to include the nodes that exists
     in repo and remote but no children exists in both repo and remote.
     In other words, base is the set of heads of the DAG resulting from
     the intersection of the nodes from repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.
 
     All the ancestors of base are in repo and in remote.
     """
     m = repo.changelog.nodemap
     search = []
     fetch = set()
     seen = set()
     seenbranch = set()
     if base is None:
         base = {}
 
     if not heads:
         heads = remote.heads()
 
     if repo.changelog.tip() == nullid:
         base[nullid] = 1
         if heads != [nullid]:
             return [nullid], [nullid], list(heads)
         return [nullid], [], []
 
     # assume we're closer to the tip than the root
     # and start by examining the heads
     repo.ui.status(_("searching for changes\n"))
 
     unknown = []
     for h in heads:
         if h not in m:
             unknown.append(h)
         else:
             base[h] = 1
 
     heads = unknown
     if not unknown:
         return base.keys(), [], []
 
     req = set(unknown)
     reqcnt = 0
 
     # search through remote branches
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
     unknown = remote.branches(unknown)
     while unknown:
         r = []
         while unknown:
             n = unknown.pop(0)
             if n[0] in seen:
                 continue
 
             repo.ui.debug("examining %s:%s\n"
                           % (short(n[0]), short(n[1])))
             if n[0] == nullid: # found the end of the branch
                 pass
             elif n in seenbranch:
                 repo.ui.debug("branch already found\n")
                 continue
             elif n[1] and n[1] in m: # do we know the base?
                 repo.ui.debug("found incomplete branch %s:%s\n"
                               % (short(n[0]), short(n[1])))
                 search.append(n[0:2]) # schedule branch range for scanning
                 seenbranch.add(n)
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if n[2] in m and n[3] in m:
                         repo.ui.debug("found new changeset %s\n" %
                                       short(n[1]))
                         fetch.add(n[1]) # earliest unknown
                     for p in n[2:4]:
                         if p in m:
                             base[p] = 1 # latest known
 
                 for p in n[2:4]:
                     if p not in req and p not in m:
                         r.append(p)
                         req.add(p)
             seen.add(n[0])
 
         if r:
             reqcnt += 1
             repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
             repo.ui.debug("request %d: %s\n" %
                         (reqcnt, " ".join(map(short, r))))
             for p in xrange(0, len(r), 10):
                 for b in remote.branches(r[p:p + 10]):
                     repo.ui.debug("received %s:%s\n" %
                                   (short(b[0]), short(b[1])))
                     unknown.append(b)
 
     # do binary search on the branches we found
     while search:
         newsearch = []
         reqcnt += 1
         repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
         for n, l in zip(search, remote.between(search)):
             l.append(n[1])
             p = n[0]
             f = 1
             for i in l:
                 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if i in m:
                     if f <= 2:
                         repo.ui.debug("found new branch changeset %s\n" %
                                       short(p))
                         fetch.add(p)
                         base[i] = 1
                     else:
                         repo.ui.debug("narrowed branch search to %s:%s\n"
                                       % (short(p), short(i)))
                         newsearch.append((p, i))
                     break
                 p, f = i, f * 2
         search = newsearch
 
     # sanity check our fetch list
     for f in fetch:
         if f in m:
             raise error.RepoError(_("already have changeset ")
                                   + short(f[:4]))
 
     if base.keys() == [nullid]:
         if force:
             repo.ui.warn(_("warning: repository is unrelated\n"))
         else:
             raise util.Abort(_("repository is unrelated"))
 
     repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")
 
     repo.ui.progress(_('searching'), None)
     repo.ui.debug("%d total queries\n" % reqcnt)
 
     return base.keys(), list(fetch), heads
 
-def findoutgoing(repo, remote, base=None, heads=None, force=False):
+def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
     """Return list of nodes that are roots of subsets not in remote
 
     If base dict is specified, assume that these nodes and their parents
     exist on the remote side.
-    If a list of heads is specified, return only nodes which are heads
-    or ancestors of these heads, and return a second element which
-    contains all remote heads which get new children.
+    If remotehead is specified, assume it is the list of the heads from
+    the remote repository.
     """
     if base is None:
         base = {}
-    findincoming(repo, remote, base, heads, force=force)
+    findincoming(repo, remote, base, remoteheads, force=force)
 
     repo.ui.debug("common changesets up to "
                   + " ".join(map(short, base.keys())) + "\n")
 
     remain = set(repo.changelog.nodemap)
 
     # prune everything remote has from the tree
     remain.remove(nullid)
     remove = base.keys()
     while remove:
         n = remove.pop(0)
         if n in remain:
             remain.remove(n)
             for p in repo.changelog.parents(n):
                 remove.append(p)
 
     # find every node whose parents have been pruned
     subset = []
     # find every remote head that will get new children
-    updated_heads = set()
     for n in remain:
         p1, p2 = repo.changelog.parents(n)
         if p1 not in remain and p2 not in remain:
             subset.append(n)
-        if heads:
-            if p1 in heads:
-                updated_heads.add(p1)
-            if p2 in heads:
-                updated_heads.add(p2)
 
-    # this is the set of all roots we have to push
-    if heads:
-        return subset, list(updated_heads)
-    else:
-        return subset
+    return subset
 
 def prepush(repo, remote, force, revs, newbranch):
     '''Analyze the local and remote repositories and determine which
     changesets need to be pushed to the remote. Return value depends
     on circumstances:
 
     If we are not going to push anything, return a tuple (None,
     outgoing) where outgoing is 0 if there are no outgoing
     changesets and 1 if there are, but we refuse to push them
     (e.g. would create new remote heads).
 
     Otherwise, return a tuple (changegroup, remoteheads), where
     changegroup is a readable file-like object whose read() returns
     successive changegroup chunks ready to be sent over the wire and
     remoteheads is the list of remote heads.'''
     common = {}
     remote_heads = remote.heads()
     inc = findincoming(repo, remote, common, remote_heads, force=force)
 
     cl = repo.changelog
-    update, updated_heads = findoutgoing(repo, remote, common, remote_heads)
+    update = findoutgoing(repo, remote, common, remote_heads)
     outg, bases, heads = cl.nodesbetween(update, revs)
 
     if not bases:
         repo.ui.status(_("no changes found\n"))
         return None, 1
 
     if not force and remote_heads != [nullid]:
 
         def fail_multiple_heads(unsynced, branch=None):
             if branch:
                 msg = _("push creates new remote heads "
                         "on branch '%s'!") % branch
             else:
                 msg = _("push creates new remote heads!")
 
             if unsynced:
                 hint = _("you should pull and merge or use push -f to force")
             else:
                 hint = _("did you forget to merge? use push -f to force")
             raise util.Abort(msg, hint=hint)
 
         if remote.capable('branchmap'):
             # Check for each named branch if we're creating new remote heads.
             # To be a remote head after push, node must be either:
             # - unknown locally
             # - a local outgoing head descended from update
             # - a remote head that's known locally and not
             #   ancestral to an outgoing head
             #
             # New named branches cannot be created without --force.
 
             # 1. Create set of branches involved in the push.
             branches = set(repo[n].branch() for n in outg)
 
             # 2. Check for new branches on the remote.
             remotemap = remote.branchmap()
             newbranches = branches - set(remotemap)
             if newbranches and not newbranch: # new branch requires --new-branch
                 branchnames = ', '.join(sorted(newbranches))
                 raise util.Abort(_("push creates new remote branches: %s!")
                                    % branchnames,
                                  hint=_("use 'hg push --new-branch' to create"
                                         " new remote branches"))
             branches.difference_update(newbranches)
 
             # 3. Construct the initial oldmap and newmap dicts.
             # They contain information about the remote heads before and
             # after the push, respectively.
             # Heads not found locally are not included in either dict,
             # since they won't be affected by the push.
             # unsynced contains all branches with incoming changesets.
             oldmap = {}
             newmap = {}
             unsynced = set()
             for branch in branches:
                 remoteheads = remotemap[branch]
                 prunedheads = [h for h in remoteheads if h in cl.nodemap]
                 oldmap[branch] = prunedheads
                 newmap[branch] = list(prunedheads)
                 if len(remoteheads) > len(prunedheads):
                     unsynced.add(branch)
 
             # 4. Update newmap with outgoing changes.
             # This will possibly add new heads and remove existing ones.
             ctxgen = (repo[n] for n in outg)
             repo._updatebranchcache(newmap, ctxgen)
 
             # 5. Check for new heads.
             # If there are more heads after the push than before, a suitable
             # warning, depending on unsynced status, is displayed.
             for branch in branches:
                 if len(newmap[branch]) > len(oldmap[branch]):
                     return fail_multiple_heads(branch in unsynced, branch)
 
             # 6. Check for unsynced changes on involved branches.
             if unsynced:
                 repo.ui.warn(_("note: unsynced remote changes!\n"))
 
         else:
             # Old servers: Check for new topological heads.
             # Code based on _updatebranchcache.
             newheads = set(h for h in remote_heads if h in cl.nodemap)
             oldheadcnt = len(newheads)
             newheads.update(outg)
             if len(newheads) > 1:
                 for latest in reversed(outg):
                     if latest not in newheads:
                         continue
                     minhrev = min(cl.rev(h) for h in newheads)
                     reachable = cl.reachable(latest, cl.node(minhrev))
                     reachable.remove(latest)
                     newheads.difference_update(reachable)
             if len(newheads) > oldheadcnt:
                 return fail_multiple_heads(inc)
             if inc:
                 repo.ui.warn(_("note: unsynced remote changes!\n"))
 
     if revs is None:
         # use the fast path, no race possible on push
         nodes = repo.changelog.findmissing(common.keys())
         cg = repo._changegroup(nodes, 'push')
     else:
         cg = repo.changegroupsubset(update, revs, 'push')
     return cg, remote_heads
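
For orientation, the relationship between the two incoming helpers above can be summarized with a small sketch (repo is a local repository object and remote a peer, as in prepush(); the variable names are otherwise illustrative):

    # findcommonincoming() returns the tuple described in its docstring:
    # (common, missing roots, heads)
    common, missingroots, heads = findcommonincoming(repo, remote)

    # findincoming() is a thin wrapper that keeps only the middle element
    missingroots = findincoming(repo, remote)   # == findcommonincoming(...)[1]

findoutgoing() goes in the other direction and, after this change, always returns just the list of roots of the changesets the remote is missing.
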