##// END OF EJS Templates
discovery: better coding style
Benoit Boissinot -
r11577:0bc45025 default
parent child Browse files
Show More
@@ -1,337 +1,337 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, error
11 11
def findincoming(repo, remote, base=None, heads=None, force=False):
    """Return list of roots of the subsets of missing nodes from remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    All the descendants of the list returned are missing in repo.
    (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
    """
    # Delegate to the full discovery routine and keep only the roots of
    # the missing subsets (the middle element of the returned triple).
    common, missingroots, remoteheads = findcommonincoming(
        repo, remote, base, heads, force)
    return missingroots
29 29
def findcommonincoming(repo, remote, base=None, heads=None, force=False):
    """Return a tuple (common, missing roots, heads) used to identify
    missing nodes from remote.

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in both repo and remote.
    In other words, base is the set of heads of the DAG resulting from
    the intersection of the nodes from repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    """
    m = repo.changelog.nodemap      # local node -> rev lookup
    search = []                     # (head, root) ranges pending binary search
    fetch = set()                   # earliest-unknown nodes found so far
    seen = set()                    # branch heads already examined
    seenbranch = set()              # full branch tuples already scheduled
    if base is None:
        base = {}

    # without an explicit head list, discover against all remote heads
    if not heads:
        heads = remote.heads()

    # empty local repo: everything on the remote (if anything) is missing
    if repo.changelog.tip() == nullid:
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    # partition remote heads: locally known ones seed base, the rest
    # drive the search below
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    heads = unknown
    if not unknown:
        # every remote head is already known locally: nothing is missing
        return base.keys(), [], []

    req = set(unknown)              # nodes already requested from the remote
    reqcnt = 0                      # round-trip counter (for progress/debug)

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []                      # parents to query in the next round-trip
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and n[1] in m: # do we know the base?
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # both parents known: the branch root itself is the
                        # earliest missing node on this segment
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # queue any still-unknown parent for the next batch
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            # batch the branches() queries ten nodes at a time
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1                   # doubling step size for the bisection
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # interval narrowed to adjacency: p is the first
                        # missing node on this branch
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base[i] = 1
                    else:
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if f in m:
            # NOTE(review): f[:4] truncates the binary node to 4 bytes
            # before short() hexifies it; short(f) would print the usual
            # 12-digit prefix — looks suspicious, confirm intent.
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    # only nullid in common means the repositories share no history
    if base.keys() == [nullid]:
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base.keys(), list(fetch), heads
176 176
def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If remotehead is specified, assume it is the list of the heads from
    the remote repository.
    """
    if base is None:
        base = {}
    # fill base with the nodes common to both repositories
    findincoming(repo, remote, base, remoteheads, force=force)

    repo.ui.debug("common changesets up to "
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every local node (minus the null revision) ...
    remain = set(repo.changelog.nodemap)
    remain.remove(nullid)

    # ... and prune everything the remote already has: walk the ancestors
    # of the common nodes, dropping each one from remain as it is reached
    pending = base.keys()
    while pending:
        node = pending.pop(0)
        if node in remain:
            remain.discard(node)
            pending.extend(repo.changelog.parents(node))

    # what survives is exactly the outgoing set; its roots are the nodes
    # neither of whose parents survived the pruning (this also picks up
    # the remote heads that will gain new children)
    subset = []
    for node in remain:
        first, second = repo.changelog.parents(node)
        if first not in remain and second not in remain:
            subset.append(node)

    return subset
213 213
def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing) where outgoing is 0 if there are no outgoing
    changesets and 1 if there are, but we refuse to push them
    (e.g. would create new remote heads).

    Otherwise, return a tuple (changegroup, remoteheads), where
    changegroup is a readable file-like object whose read() returns
    successive changegroup chunks ready to be sent over the wire and
    remoteheads is the list of remote heads.'''
    common = {}
    remoteheads = remote.heads()
    # populate common and learn whether the remote has changes we lack
    inc = findincoming(repo, remote, common, remoteheads, force=force)

    cl = repo.changelog
    # roots of the subsets missing on the remote side
    update = findoutgoing(repo, remote, common, remoteheads)
    outg, bases, heads = cl.nodesbetween(update, revs)

    if not bases:
        repo.ui.status(_("no changes found\n"))
        return None, 1

    if not force and remoteheads != [nullid]:

        def fail_multiple_heads(unsynced, branch=None):
            # Abort with a branch-specific message and a hint that depends
            # on whether the remote has unsynced incoming changes.
            if branch:
                msg = _("push creates new remote heads "
                        "on branch '%s'!") % branch
            else:
                msg = _("push creates new remote heads!")

            if unsynced:
                hint = _("you should pull and merge or use push -f to force")
            else:
                hint = _("did you forget to merge? use push -f to force")
            raise util.Abort(msg, hint=hint)

        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join(sorted(newbranches))
                raise util.Abort(_("push creates new remote branches: %s!")
                                 % branchnames,
                                 hint=_("use 'hg push --new-branch' to create"
                                        " new remote branches"))
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remotebrheads = remotemap[branch]
                prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
                oldmap[branch] = prunedbrheads
                newmap[branch] = list(prunedbrheads)
                if len(remotebrheads) > len(prunedbrheads):
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

            # 5. Check for new heads.
            # If there are more heads after the push than before, a suitable
            # warning, depending on unsynced status, is displayed.
            for branch in branches:
                if len(newmap[branch]) > len(oldmap[branch]):
                    return fail_multiple_heads(branch in unsynced, branch)

            # 6. Check for unsynced changes on involved branches.
            if unsynced:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

        else:
            # Old servers: Check for new topological heads.
            # Code based on _updatebranchcache.
            newheads = set(h for h in remoteheads if h in cl.nodemap)
            oldheadcnt = len(newheads)
            newheads.update(outg)
            if len(newheads) > 1:
                # drop heads that are ancestors of other candidate heads
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            if len(newheads) > oldheadcnt:
                return fail_multiple_heads(inc)
            if inc:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        nodes = repo.changelog.findmissing(common.keys())
        cg = repo._changegroup(nodes, 'push')
    else:
        cg = repo.changegroupsubset(update, revs, 'push')
    return cg, remoteheads
General Comments 0
You need to be logged in to leave comments. Login now