##// END OF EJS Templates
discovery: simplify findoutgoing(), the updated_head stuff wasn't used
Benoit Boissinot -
r11576:98c874a9 default
parent child Browse files
Show More
@@ -1,348 +1,337 b''
1 1 # discovery.py - protocol changeset discovery functions
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import util, error
11 11
def findincoming(repo, remote, base=None, heads=None, force=False):
    """Return the list of roots of the subsets of nodes missing from remote

    If a base dict is given, its nodes and their parents are assumed to
    exist on the remote side, with no child of a base node present in
    both repositories; base is updated in place to hold the nodes that
    exist in repo and remote but have no children in both.
    When a list of heads is given, only nodes that are those heads or
    their ancestors are considered.

    Every ancestor of base is present in repo and in remote, and every
    descendant of the returned list is missing from repo (so the rest
    of the nodes are the ones missing from remote, see outgoing).
    """
    # findcommonincoming yields (common, missing roots, remote heads);
    # only the missing roots are of interest to callers of this wrapper.
    _common, missingroots, _rheads = findcommonincoming(
        repo, remote, base, heads, force)
    return missingroots
29 29
def findcommonincoming(repo, remote, base=None, heads=None, force=False):
    """Return a tuple (common, missing roots, heads) used to identify
    missing nodes from remote.

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side and that no child of a node of base exists
    in both remote and repo.
    Furthermore base will be updated to include the nodes that exists
    in repo and remote but no children exists in both repo and remote.
    In other words, base is the set of heads of the DAG resulting from
    the intersection of the nodes from repo and remote.
    If a list of heads is specified, return only nodes which are heads
    or ancestors of these heads.

    All the ancestors of base are in repo and in remote.
    """
    # node -> rev mapping for every changeset known locally; membership
    # tests against it answer "do we already have this node?"
    m = repo.changelog.nodemap
    search = []        # (head, root) ranges to binary-search below
    fetch = set()      # earliest unknown nodes found so far
    seen = set()       # branch heads already examined
    seenbranch = set() # full branch tuples already scheduled for search
    if base is None:
        base = {}

    if not heads:
        heads = remote.heads()

    if repo.changelog.tip() == nullid:
        # local repo is empty: everything the remote has is missing
        base[nullid] = 1
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], []

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    # split remote heads into locally-unknown ones (to investigate) and
    # known ones (which are immediately common, hence added to base)
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    heads = unknown
    if not unknown:
        # every remote head is already known locally: nothing is missing
        return base.keys(), [], []

    req = set(unknown) # nodes already requested from the remote
    reqcnt = 0         # number of wire round-trips, for progress/debug

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = [] # nodes to ask the remote about in the next batch
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and n[1] in m: # do we know the base?
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        # both parents known locally: n[1] is a fetch root
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if p in m:
                            base[p] = 1 # latest known

                # queue any still-unknown parent for the next request
                for p in n[2:4]:
                    if p not in req and p not in m:
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            # batch requests in groups of 10 to bound message size
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                        (reqcnt, " ".join(map(short, r))))
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        # remote.between() returns sample nodes at power-of-two distances
        # along each (head, root) range; scan them to narrow the boundary
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        # gap of at most one node: p is a fetch root
                        repo.ui.debug("found new branch changeset %s\n" %
                                          short(p))
                        fetch.add(p)
                        base[i] = 1
                    else:
                        # keep bisecting the narrowed (p, i) range
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if f in m:
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    if base.keys() == [nullid]:
        # no common node besides null: the repositories share no history
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise util.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base.keys(), list(fetch), heads
176 176
def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
    """Return list of nodes that are roots of subsets not in remote

    If base dict is specified, assume that these nodes and their parents
    exist on the remote side.
    If remotehead is specified, assume it is the list of the heads from
    the remote repository.
    """
    if base is None:
        # no common-node information supplied: run incoming discovery,
        # which fills 'base' with the common heads as a side effect
        base = {}
        findincoming(repo, remote, base, remoteheads, force=force)

    repo.ui.debug("common changesets up to "
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every node known locally...
    remain = set(repo.changelog.nodemap)

    # ...and prune everything the remote has: the common nodes and all
    # of their ancestors (nullid is an ancestor of everything)
    remain.remove(nullid)
    pending = list(base.keys())
    while pending:
        node = pending.pop(0)
        if node not in remain:
            continue
        remain.discard(node)
        for parent in repo.changelog.parents(node):
            pending.append(parent)

    # what remains is missing remotely; its roots are the nodes whose
    # parents were both pruned above
    parents = repo.changelog.parents
    roots = []
    for node in remain:
        p1, p2 = parents(node)
        if p1 not in remain and p2 not in remain:
            roots.append(node)
    return roots
224 213
def prepush(repo, remote, force, revs, newbranch):
    '''Analyze the local and remote repositories and determine which
    changesets need to be pushed to the remote. Return value depends
    on circumstances:

    If we are not going to push anything, return a tuple (None,
    outgoing) where outgoing is 0 if there are no outgoing
    changesets and 1 if there are, but we refuse to push them
    (e.g. would create new remote heads).

    Otherwise, return a tuple (changegroup, remoteheads), where
    changegroup is a readable file-like object whose read() returns
    successive changegroup chunks ready to be sent over the wire and
    remoteheads is the list of remote heads.'''
    # 'common' is filled by findincoming with the common heads;
    # 'inc' is truthy when the remote has changesets we don't
    common = {}
    remote_heads = remote.heads()
    inc = findincoming(repo, remote, common, remote_heads, force=force)

    cl = repo.changelog
    # roots of the subsets missing remotely, then the exact outgoing set
    update = findoutgoing(repo, remote, common, remote_heads)
    outg, bases, heads = cl.nodesbetween(update, revs)

    if not bases:
        repo.ui.status(_("no changes found\n"))
        return None, 1

    if not force and remote_heads != [nullid]:

        # NOTE: always raises util.Abort, so 'return fail_multiple_heads(...)'
        # below never actually returns a value
        def fail_multiple_heads(unsynced, branch=None):
            if branch:
                msg = _("push creates new remote heads "
                        "on branch '%s'!") % branch
            else:
                msg = _("push creates new remote heads!")

            if unsynced:
                hint = _("you should pull and merge or use push -f to force")
            else:
                hint = _("did you forget to merge? use push -f to force")
            raise util.Abort(msg, hint=hint)

        if remote.capable('branchmap'):
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            # 1. Create set of branches involved in the push.
            branches = set(repo[n].branch() for n in outg)

            # 2. Check for new branches on the remote.
            remotemap = remote.branchmap()
            newbranches = branches - set(remotemap)
            if newbranches and not newbranch: # new branch requires --new-branch
                branchnames = ', '.join(sorted(newbranches))
                raise util.Abort(_("push creates new remote branches: %s!")
                                 % branchnames,
                                 hint=_("use 'hg push --new-branch' to create"
                                        " new remote branches"))
            branches.difference_update(newbranches)

            # 3. Construct the initial oldmap and newmap dicts.
            # They contain information about the remote heads before and
            # after the push, respectively.
            # Heads not found locally are not included in either dict,
            # since they won't be affected by the push.
            # unsynced contains all branches with incoming changesets.
            oldmap = {}
            newmap = {}
            unsynced = set()
            for branch in branches:
                remoteheads = remotemap[branch]
                prunedheads = [h for h in remoteheads if h in cl.nodemap]
                oldmap[branch] = prunedheads
                newmap[branch] = list(prunedheads)
                if len(remoteheads) > len(prunedheads):
                    unsynced.add(branch)

            # 4. Update newmap with outgoing changes.
            # This will possibly add new heads and remove existing ones.
            ctxgen = (repo[n] for n in outg)
            repo._updatebranchcache(newmap, ctxgen)

            # 5. Check for new heads.
            # If there are more heads after the push than before, a suitable
            # warning, depending on unsynced status, is displayed.
            for branch in branches:
                if len(newmap[branch]) > len(oldmap[branch]):
                    return fail_multiple_heads(branch in unsynced, branch)

            # 6. Check for unsynced changes on involved branches.
            if unsynced:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

        else:
            # Old servers: Check for new topological heads.
            # Code based on _updatebranchcache.
            newheads = set(h for h in remote_heads if h in cl.nodemap)
            oldheadcnt = len(newheads)
            newheads.update(outg)
            if len(newheads) > 1:
                # discard heads that are ancestors of an outgoing head:
                # they stop being heads once the push lands
                for latest in reversed(outg):
                    if latest not in newheads:
                        continue
                    minhrev = min(cl.rev(h) for h in newheads)
                    reachable = cl.reachable(latest, cl.node(minhrev))
                    reachable.remove(latest)
                    newheads.difference_update(reachable)
            if len(newheads) > oldheadcnt:
                return fail_multiple_heads(inc)
            if inc:
                repo.ui.warn(_("note: unsynced remote changes!\n"))

    if revs is None:
        # use the fast path, no race possible on push
        nodes = repo.changelog.findmissing(common.keys())
        cg = repo._changegroup(nodes, 'push')
    else:
        cg = repo.changegroupsubset(update, revs, 'push')
    return cg, remote_heads
General Comments 0
You need to be logged in to leave comments. Login now