discovery: remove unused "base" argument from find.*incoming()
Benoit Boissinot
r12760:b41e8dfe default
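In effect, callers of findincoming() and findcommonincoming() no longer pass (or receive back) a "base" dict; the common nodes are taken purely from the return value, while findoutgoing() keeps its base parameter. A minimal sketch of a call site against the new signatures (the ui/hg setup and the repository paths are illustrative assumptions, not part of this changeset):

    # hypothetical caller, written against the post-change signatures
    from mercurial import ui, hg, discovery

    myui = ui.ui()
    repo = hg.repository(myui, '/path/to/local/repo')        # placeholder path
    remote = hg.repository(myui, 'http://example.org/repo')  # placeholder URL

    # new: findcommonincoming(repo, remote, heads=None, force=False)
    common, fetch, rheads = discovery.findcommonincoming(repo, remote)

    # findincoming() is the same query, returning only the missing roots
    missing_roots = discovery.findincoming(repo, remote)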
@@ -1,335 +1,320 @@
 # discovery.py - protocol changeset discovery functions
 #
 # Copyright 2010 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from node import nullid, short
 from i18n import _
 import util, error
 
-def findincoming(repo, remote, base=None, heads=None, force=False):
+def findincoming(repo, remote, heads=None, force=False):
     """Return list of roots of the subsets of missing nodes from remote
 
-    If base dict is specified, assume that these nodes and their parents
-    exist on the remote side and that no child of a node of base exists
-    in both remote and repo.
-    Furthermore base will be updated to include the nodes that exists
-    in repo and remote but no children exists in repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.
 
-    All the ancestors of base are in repo and in remote.
+    All the ancestors of the list returned are in repo and in remote.
     All the descendants of the list returned are missing in repo.
     (and so we know that the rest of the nodes are missing in remote, see
    outgoing)
     """
-    return findcommonincoming(repo, remote, base, heads, force)[1]
+    return findcommonincoming(repo, remote, heads, force)[1]
 
-def findcommonincoming(repo, remote, base=None, heads=None, force=False):
+def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, missing roots, heads) used to identify
     missing nodes from remote.
 
-    If base dict is specified, assume that these nodes and their parents
-    exist on the remote side and that no child of a node of base exists
-    in both remote and repo.
-    Furthermore base will be updated to include the nodes that exists
-    in repo and remote but no children exists in both repo and remote.
-    In other words, base is the set of heads of the DAG resulting from
-    the intersection of the nodes from repo and remote.
     If a list of heads is specified, return only nodes which are heads
     or ancestors of these heads.
-
-    All the ancestors of base are in repo and in remote.
     """
     m = repo.changelog.nodemap
     search = []
     fetch = set()
     seen = set()
     seenbranch = set()
-    if base is None:
-        base = {}
+    base = {}
 
     if not heads:
         heads = remote.heads()
 
     if repo.changelog.tip() == nullid:
         base[nullid] = 1
         if heads != [nullid]:
             return [nullid], [nullid], list(heads)
         return [nullid], [], []
 
     # assume we're closer to the tip than the root
     # and start by examining the heads
     repo.ui.status(_("searching for changes\n"))
 
     unknown = []
     for h in heads:
         if h not in m:
             unknown.append(h)
         else:
             base[h] = 1
 
     heads = unknown
     if not unknown:
         return base.keys(), [], []
 
     req = set(unknown)
     reqcnt = 0
 
     # search through remote branches
     # a 'branch' here is a linear segment of history, with four parts:
     # head, root, first parent, second parent
     # (a branch always has two parents (or none) by definition)
     unknown = remote.branches(unknown)
     while unknown:
         r = []
         while unknown:
             n = unknown.pop(0)
             if n[0] in seen:
                 continue
 
             repo.ui.debug("examining %s:%s\n"
                           % (short(n[0]), short(n[1])))
             if n[0] == nullid: # found the end of the branch
                 pass
             elif n in seenbranch:
                 repo.ui.debug("branch already found\n")
                 continue
             elif n[1] and n[1] in m: # do we know the base?
                 repo.ui.debug("found incomplete branch %s:%s\n"
                               % (short(n[0]), short(n[1])))
                 search.append(n[0:2]) # schedule branch range for scanning
                 seenbranch.add(n)
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if n[2] in m and n[3] in m:
                         repo.ui.debug("found new changeset %s\n" %
                                       short(n[1]))
                         fetch.add(n[1]) # earliest unknown
                     for p in n[2:4]:
                         if p in m:
                             base[p] = 1 # latest known
 
                 for p in n[2:4]:
                     if p not in req and p not in m:
                         r.append(p)
                         req.add(p)
             seen.add(n[0])
 
         if r:
             reqcnt += 1
             repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
             repo.ui.debug("request %d: %s\n" %
                           (reqcnt, " ".join(map(short, r))))
             for p in xrange(0, len(r), 10):
                 for b in remote.branches(r[p:p + 10]):
                     repo.ui.debug("received %s:%s\n" %
                                   (short(b[0]), short(b[1])))
                     unknown.append(b)
 
     # do binary search on the branches we found
     while search:
         newsearch = []
         reqcnt += 1
         repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
         for n, l in zip(search, remote.between(search)):
             l.append(n[1])
             p = n[0]
             f = 1
             for i in l:
                 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if i in m:
                     if f <= 2:
                         repo.ui.debug("found new branch changeset %s\n" %
                                       short(p))
                         fetch.add(p)
                         base[i] = 1
                     else:
                         repo.ui.debug("narrowed branch search to %s:%s\n"
                                       % (short(p), short(i)))
                         newsearch.append((p, i))
                     break
                 p, f = i, f * 2
         search = newsearch
 
     # sanity check our fetch list
     for f in fetch:
         if f in m:
             raise error.RepoError(_("already have changeset ")
                                   + short(f[:4]))
 
     if base.keys() == [nullid]:
         if force:
             repo.ui.warn(_("warning: repository is unrelated\n"))
         else:
             raise util.Abort(_("repository is unrelated"))
 
     repo.ui.debug("found new changesets starting at " +
                   " ".join([short(f) for f in fetch]) + "\n")
 
     repo.ui.progress(_('searching'), None)
     repo.ui.debug("%d total queries\n" % reqcnt)
 
     return base.keys(), list(fetch), heads
 
 def findoutgoing(repo, remote, base=None, remoteheads=None, force=False):
     """Return list of nodes that are roots of subsets not in remote
 
     If base dict is specified, assume that these nodes and their parents
     exist on the remote side.
     If remotehead is specified, assume it is the list of the heads from
     the remote repository.
     """
     if base is None:
         base = findcommonincoming(repo, remote, heads=remoteheads,
                                   force=force)[0]
     else:
         base = list(base)
 
     repo.ui.debug("common changesets up to "
                   + " ".join(map(short, base)) + "\n")
 
     remain = set(repo.changelog.nodemap)
 
     # prune everything remote has from the tree
     remain.remove(nullid)
     remove = base
     while remove:
         n = remove.pop(0)
         if n in remain:
             remain.remove(n)
             for p in repo.changelog.parents(n):
                 remove.append(p)
 
     # find every node whose parents have been pruned
     subset = []
     # find every remote head that will get new children
     for n in remain:
         p1, p2 = repo.changelog.parents(n)
         if p1 not in remain and p2 not in remain:
             subset.append(n)
 
     return subset
 
 def prepush(repo, remote, force, revs, newbranch):
     '''Analyze the local and remote repositories and determine which
     changesets need to be pushed to the remote. Return value depends
     on circumstances:
 
     If we are not going to push anything, return a tuple (None,
     outgoing) where outgoing is 0 if there are no outgoing
     changesets and 1 if there are, but we refuse to push them
     (e.g. would create new remote heads).
 
     Otherwise, return a tuple (changegroup, remoteheads), where
     changegroup is a readable file-like object whose read() returns
     successive changegroup chunks ready to be sent over the wire and
     remoteheads is the list of remote heads.'''
     remoteheads = remote.heads()
     common, inc, rheads = findcommonincoming(repo, remote, heads=remoteheads,
                                              force=force)
 
     cl = repo.changelog
     update = findoutgoing(repo, remote, common, remoteheads)
     outg, bases, heads = cl.nodesbetween(update, revs)
 
     if not bases:
         repo.ui.status(_("no changes found\n"))
         return None, 1
 
     if not force and remoteheads != [nullid]:
         if remote.capable('branchmap'):
             # Check for each named branch if we're creating new remote heads.
             # To be a remote head after push, node must be either:
             # - unknown locally
             # - a local outgoing head descended from update
             # - a remote head that's known locally and not
             #   ancestral to an outgoing head
             #
             # New named branches cannot be created without --force.
 
             # 1. Create set of branches involved in the push.
             branches = set(repo[n].branch() for n in outg)
 
             # 2. Check for new branches on the remote.
             remotemap = remote.branchmap()
             newbranches = branches - set(remotemap)
             if newbranches and not newbranch: # new branch requires --new-branch
                 branchnames = ', '.join(sorted(newbranches))
                 raise util.Abort(_("push creates new remote branches: %s!")
                                  % branchnames,
                                  hint=_("use 'hg push --new-branch' to create"
                                         " new remote branches"))
             branches.difference_update(newbranches)
 
             # 3. Construct the initial oldmap and newmap dicts.
             # They contain information about the remote heads before and
             # after the push, respectively.
             # Heads not found locally are not included in either dict,
             # since they won't be affected by the push.
             # unsynced contains all branches with incoming changesets.
             oldmap = {}
             newmap = {}
             unsynced = set()
             for branch in branches:
                 remotebrheads = remotemap[branch]
                 prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
                 oldmap[branch] = prunedbrheads
                 newmap[branch] = list(prunedbrheads)
                 if len(remotebrheads) > len(prunedbrheads):
                     unsynced.add(branch)
 
             # 4. Update newmap with outgoing changes.
             # This will possibly add new heads and remove existing ones.
             ctxgen = (repo[n] for n in outg)
             repo._updatebranchcache(newmap, ctxgen)
 
         else:
             # 1-4b. old servers: Check for new topological heads.
             # Construct {old,new}map with branch = None (topological branch).
             # (code based on _updatebranchcache)
             oldheads = set(h for h in remoteheads if h in cl.nodemap)
             newheads = oldheads.union(outg)
             if len(newheads) > 1:
                 for latest in reversed(outg):
                     if latest not in newheads:
                         continue
                     minhrev = min(cl.rev(h) for h in newheads)
                     reachable = cl.reachable(latest, cl.node(minhrev))
                     reachable.remove(latest)
                     newheads.difference_update(reachable)
             branches = set([None])
             newmap = {None: newheads}
             oldmap = {None: oldheads}
             unsynced = inc and branches or set()
 
         # 5. Check for new heads.
         # If there are more heads after the push than before, a suitable
         # warning, depending on unsynced status, is displayed.
         for branch in branches:
             if len(newmap[branch]) > len(oldmap[branch]):
                 if branch:
                     msg = _("push creates new remote heads "
                             "on branch '%s'!") % branch
                 else:
                     msg = _("push creates new remote heads!")
 
                 if branch in unsynced:
                     hint = _("you should pull and merge or use push -f to force")
                 else:
                     hint = _("did you forget to merge? use push -f to force")
                 raise util.Abort(msg, hint=hint)
 
         # 6. Check for unsynced changes on involved branches.
         if unsynced:
             repo.ui.warn(_("note: unsynced remote changes!\n"))
 
     if revs is None:
         # use the fast path, no race possible on push
         nodes = repo.changelog.findmissing(common)
         cg = repo._changegroup(nodes, 'push')
     else:
         cg = repo.changegroupsubset(update, revs, 'push')
     return cg, remoteheads
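Taken together, the docstrings above describe a two-step discovery pattern: findcommonincoming() reports which nodes the local repository shares with the remote, plus the roots of the changesets it is missing and the remote's heads, and findoutgoing() prunes everything the remote already has to find the roots of what a push would send. A sketch of that pairing, mirroring how prepush() wires the two calls (repo and remote as in the earlier sketch; illustrative only):

    # nodes the remote also has, incoming roots, and the remote's heads
    common, incoming, rheads = discovery.findcommonincoming(repo, remote,
                                                            force=False)

    # roots of changesets the remote is missing; passing the common nodes
    # as "base" avoids a second discovery round-trip, as prepush() does
    outgoing_roots = discovery.findoutgoing(repo, remote, base=common,
                                            remoteheads=rheads)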
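prepush()'s two return shapes are easiest to read from the caller's side. A hedged sketch of how a push-style caller might branch on them, following only the contract stated in the docstring (the unbundle() call is an assumption about how the push code of that era hands the changegroup to the server, not something shown in this changeset):

    cg, remote_heads = discovery.prepush(repo, remote, force=False,
                                         revs=None, newbranch=False)
    if cg is None:
        # nothing will be pushed; per the docstring the second element
        # reports whether outgoing changesets existed at all
        print 'no changes to push'
    else:
        # cg is a readable file-like object whose read() yields successive
        # changegroup chunks; remote_heads lets the server detect a push
        # race, e.g. remote.unbundle(cg, remote_heads, 'push')
        chunk = cg.read(4096)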