##// END OF EJS Templates
discovery: better coding style
Benoit Boissinot -
r11577:0bc45025 default
parent child Browse files
Show More
@@ -1,337 +1,337 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, error
10 import util, error
11
11
def findincoming(repo, remote, base=None, heads=None, force=False):
    """Return the roots of the changeset subsets missing from remote.

    When a base dict is given, its nodes and their parents are assumed
    to exist on the remote side, with no child of a base node present in
    both remote and repo; base is additionally updated with the nodes
    common to repo and remote that have no common children.
    When a list of heads is given, only nodes that are those heads or
    their ancestors are considered.

    Every ancestor of base exists in both repo and remote, and every
    descendant of the returned roots is absent from repo (so the
    remaining nodes are exactly the ones missing from remote, see
    outgoing).
    """
    # only the "missing roots" part of the (common, missing, heads) triple
    _common, missingroots, _remoteheads = findcommonincoming(
        repo, remote, base, heads, force)
    return missingroots
29
29
def findcommonincoming(repo, remote, base=None, heads=None, force=False):
    """Return a tuple (common, missing roots, heads) used to identify
    missing nodes from remote.

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in both repo and remote.
    In other words, base is the set of heads of the DAG resulting from
    the intersection of the nodes from repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    """
    # nodemap maps node ids to local revision numbers; membership in it
    # is the "do we know this node locally?" test used throughout.
    m = repo.changelog.nodemap
    search = []           # (head, root) ranges to binary-search later
    fetch = set()         # earliest locally-unknown nodes found so far
    seen = set()          # branch heads already examined
    seenbranch = set()    # full branch tuples already examined
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if repo.changelog.tip() == nullid:
        # local repo is empty: everything the remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    # partition the remote heads into locally-unknown ones (to explore)
    # and locally-known ones (immediately common, i.e. part of base)
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    heads = unknown
    if not unknown:
        # every remote head is already known locally; nothing is missing
        return base.keys(), [], []

    req = set(unknown)    # nodes already requested from the remote
    reqcnt = 0            # number of round-trips, for progress/debug only

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []  # parents to ask the remote about in the next batch
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and n[1] in m: # do we know the base?
                # branch straddles the known/unknown boundary: narrow it
                # down later with a binary search over the segment
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # both parents known: the root itself is the
                        # earliest unknown node of this segment
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # keep walking past locally-unknown parents we have not
                # asked about yet
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            # batch remote queries 10 nodes at a time to bound the
            # size of each wire request
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        # remote.between returns sample nodes inside each (head, root)
        # range at exponentially growing distances from the head
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1  # distance from p to the current sample, doubling each step
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most two: p is the first unknown node
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base[i] = 1
                    else:
                        # boundary lies between p and i: search that
                        # narrower range in the next round
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if f in m:
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    if base.keys() == [nullid]:
        # no common node other than null: the repositories share no
        # history at all
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base.keys(), list(fetch), heads
176
176
def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
    """Return the root nodes of the changeset subsets absent from remote.

    When a base dict is given, its nodes and their parents are assumed
    to exist on the remote side; otherwise it is computed here.
    When remoteheads is given, it is taken to be the list of heads of
    the remote repository.
    """
    if base is None:
        base = {}
    # fill base with the nodes common to both sides (mutated in place)
    findincoming(repo, remote, base, remoteheads, force=force)

    repo.ui.debug("common changesets up to "
                  + " ".join(map(short, base.keys())) + "\n")

    candidates = set(repo.changelog.nodemap)

    # prune every node the remote already has (base and all ancestors)
    candidates.remove(nullid)
    queue = list(base.keys())
    while queue:
        node = queue.pop(0)
        if node in candidates:
            candidates.remove(node)
            queue.extend(repo.changelog.parents(node))

    # what survives pruning is missing remotely; its roots are the
    # survivors neither of whose parents survived
    return [node for node in candidates
            if all(p not in candidates
                   for p in repo.changelog.parents(node))]
213
213
def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing) where outgoing is 0 if there are no outgoing
    changesets and 1 if there are, but we refuse to push them
    (e.g. would create new remote heads).

    Otherwise, return a tuple (changegroup, remoteheads), where
    changegroup is a readable file-like object whose read() returns
    successive changegroup chunks ready to be sent over the wire and
    remoteheads is the list of remote heads.'''
    # common is filled in-place by findincoming with the shared nodes;
    # inc is truthy when the remote has changesets we do not
    common = {}
    remoteheads = remote.heads()
    inc = findincoming(repo, remote, common, remoteheads, force=force)

    cl = repo.changelog
    update = findoutgoing(repo, remote, common, remoteheads)
    outg, bases, heads = cl.nodesbetween(update, revs)

    if not bases:
        repo.ui.status(_("no changes found\n"))
        return None, 1

    # the new-head checks are skipped with --force, and also when the
    # remote is empty (its only "head" is nullid)
    if not force and remoteheads != [nullid]:

        def fail_multiple_heads(unsynced, branch=None):
            # always raises; the hint depends on whether the remote also
            # has changes we have not pulled yet
            if branch:
                msg = _("push creates new remote heads "
                        "on branch '%s'!") % branch
            else:
                msg = _("push creates new remote heads!")

            if unsynced:
                hint = _("you should pull and merge or use push -f to force")
            else:
                hint = _("did you forget to merge? use push -f to force")
            raise util.Abort(msg, hint=hint)

        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join(sorted(newbranches))
                raise util.Abort(_("push creates new remote branches: %s!")
                                 % branchnames,
                                 hint=_("use 'hg push --new-branch' to create"
                                        " new remote branches"))
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remotebrheads = remotemap[branch]
                prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
                oldmap[branch] = prunedbrheads
                newmap[branch] = list(prunedbrheads)
                if len(remotebrheads) > len(prunedbrheads):
                    # some remote heads on this branch are unknown
                    # locally, i.e. there are incoming changes
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

            # 5. Check for new heads.
            # If there are more heads after the push than before, a suitable
            # warning, depending on unsynced status, is displayed.
            for branch in branches:
                if len(newmap[branch]) > len(oldmap[branch]):
                    # NOTE(review): fail_multiple_heads always raises, so
                    # this return never yields a value; kept for clarity
                    return fail_multiple_heads(branch in unsynced, branch)

            # 6. Check for unsynced changes on involved branches.
            if unsynced:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

        else:
            # Old servers: Check for new topological heads.
            # Code based on _updatebranchcache.
            newheads = set(h for h in remoteheads if h in cl.nodemap)
            oldheadcnt = len(newheads)
            newheads.update(outg)
            if len(newheads) > 1:
                # drop every candidate head that is an ancestor of a
                # later outgoing changeset
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            if len(newheads) > oldheadcnt:
                return fail_multiple_heads(inc)
            if inc:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        nodes = repo.changelog.findmissing(common.keys())
        cg = repo._changegroup(nodes, 'push')
    else:
        cg = repo.changegroupsubset(update, revs, 'push')
    return cg, remoteheads
General Comments 0
You need to be logged in to leave comments. Login now