discovery: use set instead of dict
Benoit Boissinot
r12761:11c3c77d default
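The change is a straightforward data-structure cleanup: base in findcommonincoming was a dict whose values were always 1 and that was only ever consulted for key membership, so it becomes a real set. A minimal before/after sketch of the idiom (the helper names and data below are illustrative, not part of the patch):

# Before: a dict standing in for a set (common Python 2 idiom).
def known_nodes_old(candidates, nodemap):
    base = {}
    for n in candidates:
        if n in nodemap:
            base[n] = 1          # the value carries no information
    return base.keys()           # a list in Python 2

# After: a real set, converted to a list only at the boundary.
def known_nodes_new(candidates, nodemap):
    base = set()
    for n in candidates:
        if n in nodemap:
            base.add(n)
    return list(base)

# Both return the same nodes, order aside.
assert sorted(known_nodes_old("abcd", {"b", "d"})) == \
       sorted(known_nodes_new("abcd", {"b", "d"}))

As in the sketch, the patch keeps returning a list at the function boundary (list(base) instead of base.keys()), so callers see the same type as before.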
@@ -1,320 +1,321 @@
 # discovery.py - protocol changeset discovery functions
 #
 # Copyright 2010 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from node import nullid, short
 from i18n import _
 import util, error

 def findincoming(repo, remote, heads=None, force=False):
     """Return list of roots of the subsets of missing nodes from remote

     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.

     All the ancestors of the list returned are in repo and in remote.
     All the descendants of the list returned are missing in repo.
     (and so we know that the rest of the nodes are missing in remote, see
     outgoing)
     """
     return findcommonincoming(repo, remote, heads, force)[1]

 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, missing roots, heads) used to identify
     missing nodes from remote.

     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.
     """
     m = repo.changelog.nodemap
     search = []
     fetch = set()
     seen = set()
     seenbranch = set()
-    base = {}
+    base = set()

     if not heads:
         heads = remote.heads()

     if repo.changelog.tip() == nullid:
-        base[nullid] = 1
+        base.add(nullid)
         if heads != [nullid]:
             return [nullid], [nullid], list(heads)
         return [nullid], [], []

     # assume we're closer to the tip than the root
     # and start by examining the heads
     repo.ui.status(_("searching for changes\n"))

     unknown = []
     for h in heads:
         if h not in m:
             unknown.append(h)
         else:
-            base[h] = 1
+            base.add(h)

     heads = unknown
     if not unknown:
-        return base.keys(), [], []
+        return list(base), [], []

     req = set(unknown)
     reqcnt = 0

     # search through remote branches
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
     unknown = remote.branches(unknown)
     while unknown:
         r = []
         while unknown:
             n = unknown.pop(0)
             if n[0] in seen:
                 continue

             repo.ui.debug("examining %s:%s\n"
                           % (short(n[0]), short(n[1])))
             if n[0] == nullid: # found the end of the branch
                 pass
             elif n in seenbranch:
                 repo.ui.debug("branch already found\n")
                 continue
             elif n[1] and n[1] in m: # do we know the base?
                 repo.ui.debug("found incomplete branch %s:%s\n"
                               % (short(n[0]), short(n[1])))
                 search.append(n[0:2]) # schedule branch range for scanning
                 seenbranch.add(n)
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if n[2] in m and n[3] in m:
                         repo.ui.debug("found new changeset %s\n" %
                                       short(n[1]))
                         fetch.add(n[1]) # earliest unknown
                     for p in n[2:4]:
                         if p in m:
-                            base[p] = 1 # latest known
+                            base.add(p) # latest known

                 for p in n[2:4]:
                     if p not in req and p not in m:
                         r.append(p)
                         req.add(p)
             seen.add(n[0])

         if r:
             reqcnt += 1
             repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
             repo.ui.debug("request %d: %s\n" %
                           (reqcnt, " ".join(map(short, r))))
             for p in xrange(0, len(r), 10):
                 for b in remote.branches(r[p:p + 10]):
                     repo.ui.debug("received %s:%s\n" %
                                   (short(b[0]), short(b[1])))
                     unknown.append(b)

     # do binary search on the branches we found
     while search:
         newsearch = []
         reqcnt += 1
         repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
         for n, l in zip(search, remote.between(search)):
             l.append(n[1])
             p = n[0]
             f = 1
             for i in l:
                 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if i in m:
                     if f <= 2:
                         repo.ui.debug("found new branch changeset %s\n" %
                                       short(p))
                         fetch.add(p)
-                        base[i] = 1
+                        base.add(i)
                     else:
                         repo.ui.debug("narrowed branch search to %s:%s\n"
                                       % (short(p), short(i)))
                         newsearch.append((p, i))
                     break
                 p, f = i, f * 2
         search = newsearch

     # sanity check our fetch list
     for f in fetch:
         if f in m:
             raise error.RepoError(_("already have changeset ")
                                   + short(f[:4]))

-    if base.keys() == [nullid]:
+    base = list(base)
+    if base == [nullid]:
         if force:
             repo.ui.warn(_("warning: repository is unrelated\n"))
         else:
             raise util.Abort(_("repository is unrelated"))

     repo.ui.debug("found new changesets starting at " +
                   " ".join([short(f) for f in fetch]) + "\n")

     repo.ui.progress(_('searching'), None)
     repo.ui.debug("%d total queries\n" % reqcnt)

-    return base.keys(), list(fetch), heads
+    return base, list(fetch), heads

 def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
     """Return list of nodes that are roots of subsets not in remote

     If base dict is specified, assume that these nodes and their parents
     exist on the remote side.
     If remotehead is specified, assume it is the list of the heads from
     the remote repository.
     """
     if base is None:
         base = findcommonincoming(repo, remote, heads=remoteheads,
                                   force=force)[0]
     else:
         base = list(base)

     repo.ui.debug("common changesets up to "
                   + " ".join(map(short, base)) + "\n")

     remain = set(repo.changelog.nodemap)

     # prune everything remote has from the tree
     remain.remove(nullid)
     remove = base
     while remove:
         n = remove.pop(0)
         if n in remain:
             remain.remove(n)
             for p in repo.changelog.parents(n):
                 remove.append(p)

     # find every node whose parents have been pruned
     subset = []
     # find every remote head that will get new children
     for n in remain:
         p1, p2 = repo.changelog.parents(n)
         if p1 not in remain and p2 not in remain:
             subset.append(n)

     return subset

 def prepush(repo, remote, force, revs, newbranch):
     '''Analyze the local and remote repositories and determine which
     changesets need to be pushed to the remote. Return value depends
     on circumstances:

     If we are not going to push anything, return a tuple (None,
     outgoing) where outgoing is 0 if there are no outgoing
     changesets and 1 if there are, but we refuse to push them
     (e.g. would create new remote heads).

     Otherwise, return a tuple (changegroup, remoteheads), where
     changegroup is a readable file-like object whose read() returns
     successive changegroup chunks ready to be sent over the wire and
     remoteheads is the list of remote heads.'''
     remoteheads = remote.heads()
     common, inc, rheads = findcommonincoming(repo, remote, heads=remoteheads,
                                              force=force)

     cl = repo.changelog
     update = findoutgoing(repo, remote, common, remoteheads)
     outg, bases, heads = cl.nodesbetween(update, revs)

     if not bases:
         repo.ui.status(_("no changes found\n"))
         return None, 1

     if not force and remoteheads != [nullid]:
         if remote.capable('branchmap'):
             # Check for each named branch if we're creating new remote heads.
             # To be a remote head after push, node must be either:
             # - unknown locally
             # - a local outgoing head descended from update
             # - a remote head that's known locally and not
             #   ancestral to an outgoing head
             #
             # New named branches cannot be created without --force.

             # 1. Create set of branches involved in the push.
             branches = set(repo[n].branch() for n in outg)

             # 2. Check for new branches on the remote.
             remotemap = remote.branchmap()
             newbranches = branches - set(remotemap)
             if newbranches and not newbranch: # new branch requires --new-branch
                 branchnames = ', '.join(sorted(newbranches))
                 raise util.Abort(_("push creates new remote branches: %s!")
                                  % branchnames,
                                  hint=_("use 'hg push --new-branch' to create"
                                         " new remote branches"))
             branches.difference_update(newbranches)

             # 3. Construct the initial oldmap and newmap dicts.
             # They contain information about the remote heads before and
             # after the push, respectively.
             # Heads not found locally are not included in either dict,
             # since they won't be affected by the push.
             # unsynced contains all branches with incoming changesets.
             oldmap = {}
             newmap = {}
             unsynced = set()
             for branch in branches:
                 remotebrheads = remotemap[branch]
                 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
                 oldmap[branch] = prunedbrheads
                 newmap[branch] = list(prunedbrheads)
                 if len(remotebrheads) > len(prunedbrheads):
                     unsynced.add(branch)

             # 4. Update newmap with outgoing changes.
             # This will possibly add new heads and remove existing ones.
             ctxgen = (repo[n] for n in outg)
             repo._updatebranchcache(newmap, ctxgen)

         else:
             # 1-4b. old servers: Check for new topological heads.
             # Construct {old,new}map with branch = None (topological branch).
             # (code based on _updatebranchcache)
             oldheads = set(h for h in remoteheads if h in cl.nodemap)
             newheads = oldheads.union(outg)
             if len(newheads) > 1:
                 for latest in reversed(outg):
                     if latest not in newheads:
                         continue
                     minhrev = min(cl.rev(h) for h in newheads)
                     reachable = cl.reachable(latest, cl.node(minhrev))
                     reachable.remove(latest)
                     newheads.difference_update(reachable)
             branches = set([None])
             newmap = {None: newheads}
             oldmap = {None: oldheads}
             unsynced = inc and branches or set()

         # 5. Check for new heads.
         # If there are more heads after the push than before, a suitable
         # warning, depending on unsynced status, is displayed.
         for branch in branches:
             if len(newmap[branch]) > len(oldmap[branch]):
                 if branch:
                     msg = _("push creates new remote heads "
                             "on branch '%s'!") % branch
                 else:
                     msg = _("push creates new remote heads!")

                 if branch in unsynced:
                     hint = _("you should pull and merge or use push -f to force")
                 else:
                     hint = _("did you forget to merge? use push -f to force")
                 raise util.Abort(msg, hint=hint)

         # 6. Check for unsynced changes on involved branches.
         if unsynced:
             repo.ui.warn(_("note: unsynced remote changes!\n"))

     if revs is None:
         # use the fast path, no race possible on push
         nodes = repo.changelog.findmissing(common)
         cg = repo._changegroup(nodes, 'push')
     else:
         cg = repo.changegroupsubset(update, revs, 'push')
     return cg, remoteheads
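A note on the list conversion near the end of findcommonincoming: the set is still turned back into a list (base = list(base)) before the unrelated-repository check and the return, because callers such as findoutgoing treat the common nodes as a mutable work queue and pop from the front, which a set does not support. A simplified, hypothetical mirror of that pruning loop (node names and the parents mapping are made up for illustration):

def prune_known(parents, common):
    # parents: node -> tuple of parent nodes, mimicking repo.changelog.parents()
    remain = set(parents)        # all local nodes
    remove = list(common)        # must be a list: pop(0) below is positional
    while remove:
        n = remove.pop(0)
        if n in remain:
            remain.remove(n)
            remove.extend(parents[n])
    return remain                # nodes presumed missing on the remote

# toy linear history a <- b <- c <- d, where the remote is known to have 'b'
parents = {'a': (), 'b': ('a',), 'c': ('b',), 'd': ('c',)}
print(sorted(prune_known(parents, ['b'])))   # ['c', 'd']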