##// END OF EJS Templates
discovery: fix invalid comment about extinct being ignored
Patrick Mezard -
r17254:0deb66d4 stable
parent child Browse files
Show More
@@ -1,370 +1,369 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases, obsolete
10 import util, setdiscovery, treediscovery, phases, obsolete
11
11
12 def findcommonincoming(repo, remote, heads=None, force=False):
12 def findcommonincoming(repo, remote, heads=None, force=False):
13 """Return a tuple (common, anyincoming, heads) used to identify the common
13 """Return a tuple (common, anyincoming, heads) used to identify the common
14 subset of nodes between repo and remote.
14 subset of nodes between repo and remote.
15
15
16 "common" is a list of (at least) the heads of the common subset.
16 "common" is a list of (at least) the heads of the common subset.
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
17 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 locally. If remote does not support getbundle, this actually is a list of
18 locally. If remote does not support getbundle, this actually is a list of
19 roots of the nodes that would be incoming, to be supplied to
19 roots of the nodes that would be incoming, to be supplied to
20 changegroupsubset. No code except for pull should be relying on this fact
20 changegroupsubset. No code except for pull should be relying on this fact
21 any longer.
21 any longer.
22 "heads" is either the supplied heads, or else the remote's heads.
22 "heads" is either the supplied heads, or else the remote's heads.
23
23
24 If you pass heads and they are all known locally, the response lists just
24 If you pass heads and they are all known locally, the response lists just
25 these heads in "common" and in "heads".
25 these heads in "common" and in "heads".
26
26
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
27 Please use findcommonoutgoing to compute the set of outgoing nodes to give
28 extensions a good hook into outgoing.
28 extensions a good hook into outgoing.
29 """
29 """
30
30
31 if not remote.capable('getbundle'):
31 if not remote.capable('getbundle'):
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
32 return treediscovery.findcommonincoming(repo, remote, heads, force)
33
33
34 if heads:
34 if heads:
35 allknown = True
35 allknown = True
36 nm = repo.changelog.nodemap
36 nm = repo.changelog.nodemap
37 for h in heads:
37 for h in heads:
38 if nm.get(h) is None:
38 if nm.get(h) is None:
39 allknown = False
39 allknown = False
40 break
40 break
41 if allknown:
41 if allknown:
42 return (heads, False, heads)
42 return (heads, False, heads)
43
43
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
44 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
45 abortwhenunrelated=not force)
45 abortwhenunrelated=not force)
46 common, anyinc, srvheads = res
46 common, anyinc, srvheads = res
47 return (list(common), anyinc, heads or list(srvheads))
47 return (list(common), anyinc, heads or list(srvheads))
48
48
49 class outgoing(object):
49 class outgoing(object):
50 '''Represents the set of nodes present in a local repo but not in a
50 '''Represents the set of nodes present in a local repo but not in a
51 (possibly) remote one.
51 (possibly) remote one.
52
52
53 Members:
53 Members:
54
54
55 missing is a list of all nodes present in local but not in remote.
55 missing is a list of all nodes present in local but not in remote.
56 common is a list of all nodes shared between the two repos.
56 common is a list of all nodes shared between the two repos.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
57 excluded is the list of missing changesets that shouldn't be sent remotely.
58 missingheads is the list of heads of missing.
58 missingheads is the list of heads of missing.
59 commonheads is the list of heads of common.
59 commonheads is the list of heads of common.
60
60
61 The sets are computed on demand from the heads, unless provided upfront
61 The sets are computed on demand from the heads, unless provided upfront
62 by discovery.'''
62 by discovery.'''
63
63
64 def __init__(self, revlog, commonheads, missingheads):
64 def __init__(self, revlog, commonheads, missingheads):
65 self.commonheads = commonheads
65 self.commonheads = commonheads
66 self.missingheads = missingheads
66 self.missingheads = missingheads
67 self._revlog = revlog
67 self._revlog = revlog
68 self._common = None
68 self._common = None
69 self._missing = None
69 self._missing = None
70 self.excluded = []
70 self.excluded = []
71
71
72 def _computecommonmissing(self):
72 def _computecommonmissing(self):
73 sets = self._revlog.findcommonmissing(self.commonheads,
73 sets = self._revlog.findcommonmissing(self.commonheads,
74 self.missingheads)
74 self.missingheads)
75 self._common, self._missing = sets
75 self._common, self._missing = sets
76
76
77 @util.propertycache
77 @util.propertycache
78 def common(self):
78 def common(self):
79 if self._common is None:
79 if self._common is None:
80 self._computecommonmissing()
80 self._computecommonmissing()
81 return self._common
81 return self._common
82
82
83 @util.propertycache
83 @util.propertycache
84 def missing(self):
84 def missing(self):
85 if self._missing is None:
85 if self._missing is None:
86 self._computecommonmissing()
86 self._computecommonmissing()
87 return self._missing
87 return self._missing
88
88
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
89 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
90 commoninc=None, portable=False):
90 commoninc=None, portable=False):
91 '''Return an outgoing instance to identify the nodes present in repo but
91 '''Return an outgoing instance to identify the nodes present in repo but
92 not in other.
92 not in other.
93
93
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
94 If onlyheads is given, only nodes ancestral to nodes in onlyheads
95 (inclusive) are included. If you already know the local repo's heads,
95 (inclusive) are included. If you already know the local repo's heads,
96 passing them in onlyheads is faster than letting them be recomputed here.
96 passing them in onlyheads is faster than letting them be recomputed here.
97
97
98 If commoninc is given, it must be the result of a prior call to
98 If commoninc is given, it must be the result of a prior call to
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
99 findcommonincoming(repo, other, force) to avoid recomputing it here.
100
100
101 If portable is given, compute more conservative common and missingheads,
101 If portable is given, compute more conservative common and missingheads,
102 to make bundles created from the instance more portable.'''
102 to make bundles created from the instance more portable.'''
103 # declare an empty outgoing object to be filled later
103 # declare an empty outgoing object to be filled later
104 og = outgoing(repo.changelog, None, None)
104 og = outgoing(repo.changelog, None, None)
105
105
106 # get common set if not provided
106 # get common set if not provided
107 if commoninc is None:
107 if commoninc is None:
108 commoninc = findcommonincoming(repo, other, force=force)
108 commoninc = findcommonincoming(repo, other, force=force)
109 og.commonheads, _any, _hds = commoninc
109 og.commonheads, _any, _hds = commoninc
110
110
111 # compute outgoing
111 # compute outgoing
112 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
112 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
113 if not mayexclude:
113 if not mayexclude:
114 og.missingheads = onlyheads or repo.heads()
114 og.missingheads = onlyheads or repo.heads()
115 elif onlyheads is None:
115 elif onlyheads is None:
116 # use visible heads as it should be cached
116 # use visible heads as it should be cached
117 og.missingheads = visibleheads(repo)
117 og.missingheads = visibleheads(repo)
118 # extinct changesets are silently ignored
119 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
118 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
120 else:
119 else:
121 # compute common, missing and exclude secret stuff
120 # compute common, missing and exclude secret stuff
122 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
121 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
123 og._common, allmissing = sets
122 og._common, allmissing = sets
124 og._missing = missing = []
123 og._missing = missing = []
125 og.excluded = excluded = []
124 og.excluded = excluded = []
126 for node in allmissing:
125 for node in allmissing:
127 ctx = repo[node]
126 ctx = repo[node]
128 if ctx.phase() >= phases.secret or ctx.extinct():
127 if ctx.phase() >= phases.secret or ctx.extinct():
129 excluded.append(node)
128 excluded.append(node)
130 else:
129 else:
131 missing.append(node)
130 missing.append(node)
132 if len(missing) == len(allmissing):
131 if len(missing) == len(allmissing):
133 missingheads = onlyheads
132 missingheads = onlyheads
134 else: # update missing heads
133 else: # update missing heads
135 missingheads = phases.newheads(repo, onlyheads, excluded)
134 missingheads = phases.newheads(repo, onlyheads, excluded)
136 og.missingheads = missingheads
135 og.missingheads = missingheads
137 if portable:
136 if portable:
138 # recompute common and missingheads as if -r<rev> had been given for
137 # recompute common and missingheads as if -r<rev> had been given for
139 # each head of missing, and --base <rev> for each head of the proper
138 # each head of missing, and --base <rev> for each head of the proper
140 # ancestors of missing
139 # ancestors of missing
141 og._computecommonmissing()
140 og._computecommonmissing()
142 cl = repo.changelog
141 cl = repo.changelog
143 missingrevs = set(cl.rev(n) for n in og._missing)
142 missingrevs = set(cl.rev(n) for n in og._missing)
144 og._common = set(cl.ancestors(missingrevs)) - missingrevs
143 og._common = set(cl.ancestors(missingrevs)) - missingrevs
145 commonheads = set(og.commonheads)
144 commonheads = set(og.commonheads)
146 og.missingheads = [h for h in og.missingheads if h not in commonheads]
145 og.missingheads = [h for h in og.missingheads if h not in commonheads]
147
146
148 return og
147 return og
149
148
150 def _headssummary(repo, remote, outgoing):
149 def _headssummary(repo, remote, outgoing):
151 """compute a summary of branch and heads status before and after push
150 """compute a summary of branch and heads status before and after push
152
151
153 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
152 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
154
153
155 - branch: the branch name
154 - branch: the branch name
156 - remoteheads: the list of remote heads known locally
155 - remoteheads: the list of remote heads known locally
157 None if the branch is new
156 None if the branch is new
158 - newheads: the new remote heads (known locally) with outgoing pushed
157 - newheads: the new remote heads (known locally) with outgoing pushed
159 - unsyncedheads: the list of remote heads unknown locally.
158 - unsyncedheads: the list of remote heads unknown locally.
160 """
159 """
161 cl = repo.changelog
160 cl = repo.changelog
162 headssum = {}
161 headssum = {}
163 # A. Create set of branches involved in the push.
162 # A. Create set of branches involved in the push.
164 branches = set(repo[n].branch() for n in outgoing.missing)
163 branches = set(repo[n].branch() for n in outgoing.missing)
165 remotemap = remote.branchmap()
164 remotemap = remote.branchmap()
166 newbranches = branches - set(remotemap)
165 newbranches = branches - set(remotemap)
167 branches.difference_update(newbranches)
166 branches.difference_update(newbranches)
168
167
169 # A. register remote heads
168 # A. register remote heads
170 remotebranches = set()
169 remotebranches = set()
171 for branch, heads in remote.branchmap().iteritems():
170 for branch, heads in remote.branchmap().iteritems():
172 remotebranches.add(branch)
171 remotebranches.add(branch)
173 known = []
172 known = []
174 unsynced = []
173 unsynced = []
175 for h in heads:
174 for h in heads:
176 if h in cl.nodemap:
175 if h in cl.nodemap:
177 known.append(h)
176 known.append(h)
178 else:
177 else:
179 unsynced.append(h)
178 unsynced.append(h)
180 headssum[branch] = (known, list(known), unsynced)
179 headssum[branch] = (known, list(known), unsynced)
181 # B. add new branch data
180 # B. add new branch data
182 missingctx = list(repo[n] for n in outgoing.missing)
181 missingctx = list(repo[n] for n in outgoing.missing)
183 touchedbranches = set()
182 touchedbranches = set()
184 for ctx in missingctx:
183 for ctx in missingctx:
185 branch = ctx.branch()
184 branch = ctx.branch()
186 touchedbranches.add(branch)
185 touchedbranches.add(branch)
187 if branch not in headssum:
186 if branch not in headssum:
188 headssum[branch] = (None, [], [])
187 headssum[branch] = (None, [], [])
189
188
190 # C drop data about untouched branches:
189 # C drop data about untouched branches:
191 for branch in remotebranches - touchedbranches:
190 for branch in remotebranches - touchedbranches:
192 del headssum[branch]
191 del headssum[branch]
193
192
194 # D. Update newmap with outgoing changes.
193 # D. Update newmap with outgoing changes.
195 # This will possibly add new heads and remove existing ones.
194 # This will possibly add new heads and remove existing ones.
196 newmap = dict((branch, heads[1]) for branch, heads in headssum.iteritems()
195 newmap = dict((branch, heads[1]) for branch, heads in headssum.iteritems()
197 if heads[0] is not None)
196 if heads[0] is not None)
198 repo._updatebranchcache(newmap, missingctx)
197 repo._updatebranchcache(newmap, missingctx)
199 for branch, newheads in newmap.iteritems():
198 for branch, newheads in newmap.iteritems():
200 headssum[branch][1][:] = newheads
199 headssum[branch][1][:] = newheads
201 return headssum
200 return headssum
202
201
203 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
202 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
204 """Compute branchmapsummary for repo without branchmap support"""
203 """Compute branchmapsummary for repo without branchmap support"""
205
204
206 cl = repo.changelog
205 cl = repo.changelog
207 # 1-4b. old servers: Check for new topological heads.
206 # 1-4b. old servers: Check for new topological heads.
208 # Construct {old,new}map with branch = None (topological branch).
207 # Construct {old,new}map with branch = None (topological branch).
209 # (code based on _updatebranchcache)
208 # (code based on _updatebranchcache)
210 oldheads = set(h for h in remoteheads if h in cl.nodemap)
209 oldheads = set(h for h in remoteheads if h in cl.nodemap)
211 # all nodes in outgoing.missing are children of either:
210 # all nodes in outgoing.missing are children of either:
212 # - an element of oldheads
211 # - an element of oldheads
213 # - another element of outgoing.missing
212 # - another element of outgoing.missing
214 # - nullrev
213 # - nullrev
215 # This explains why the new heads are very simple to compute.
214 # This explains why the new heads are very simple to compute.
216 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
215 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
217 newheads = list(c.node() for c in r)
216 newheads = list(c.node() for c in r)
218 unsynced = inc and set([None]) or set()
217 unsynced = inc and set([None]) or set()
219 return {None: (oldheads, newheads, unsynced)}
218 return {None: (oldheads, newheads, unsynced)}
220
219
221 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
220 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
222 """Check that a push won't add any outgoing head
221 """Check that a push won't add any outgoing head
223
222
224 raise Abort error and display ui message as needed.
223 raise Abort error and display ui message as needed.
225 """
224 """
226 # Check for each named branch if we're creating new remote heads.
225 # Check for each named branch if we're creating new remote heads.
227 # To be a remote head after push, node must be either:
226 # To be a remote head after push, node must be either:
228 # - unknown locally
227 # - unknown locally
229 # - a local outgoing head descended from update
228 # - a local outgoing head descended from update
230 # - a remote head that's known locally and not
229 # - a remote head that's known locally and not
231 # ancestral to an outgoing head
230 # ancestral to an outgoing head
232 if remoteheads == [nullid]:
231 if remoteheads == [nullid]:
233 # remote is empty, nothing to check.
232 # remote is empty, nothing to check.
234 return
233 return
235
234
236 if remote.capable('branchmap'):
235 if remote.capable('branchmap'):
237 headssum = _headssummary(repo, remote, outgoing)
236 headssum = _headssummary(repo, remote, outgoing)
238 else:
237 else:
239 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
238 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
240 newbranches = [branch for branch, heads in headssum.iteritems()
239 newbranches = [branch for branch, heads in headssum.iteritems()
241 if heads[0] is None]
240 if heads[0] is None]
242 # 1. Check for new branches on the remote.
241 # 1. Check for new branches on the remote.
243 if newbranches and not newbranch: # new branch requires --new-branch
242 if newbranches and not newbranch: # new branch requires --new-branch
244 branchnames = ', '.join(sorted(newbranches))
243 branchnames = ', '.join(sorted(newbranches))
245 raise util.Abort(_("push creates new remote branches: %s!")
244 raise util.Abort(_("push creates new remote branches: %s!")
246 % branchnames,
245 % branchnames,
247 hint=_("use 'hg push --new-branch' to create"
246 hint=_("use 'hg push --new-branch' to create"
248 " new remote branches"))
247 " new remote branches"))
249
248
250 # 2 compute newly pushed bookmarks. We
249 # 2 compute newly pushed bookmarks. We
251 # we don't warn about bookmarked heads.
250 # we don't warn about bookmarked heads.
252 localbookmarks = repo._bookmarks
251 localbookmarks = repo._bookmarks
253 remotebookmarks = remote.listkeys('bookmarks')
252 remotebookmarks = remote.listkeys('bookmarks')
254 bookmarkedheads = set()
253 bookmarkedheads = set()
255 for bm in localbookmarks:
254 for bm in localbookmarks:
256 rnode = remotebookmarks.get(bm)
255 rnode = remotebookmarks.get(bm)
257 if rnode and rnode in repo:
256 if rnode and rnode in repo:
258 lctx, rctx = repo[bm], repo[rnode]
257 lctx, rctx = repo[bm], repo[rnode]
259 if rctx == lctx.ancestor(rctx):
258 if rctx == lctx.ancestor(rctx):
260 bookmarkedheads.add(lctx.node())
259 bookmarkedheads.add(lctx.node())
261
260
262 # 3. Check for new heads.
261 # 3. Check for new heads.
263 # If there are more heads after the push than before, a suitable
262 # If there are more heads after the push than before, a suitable
264 # error message, depending on unsynced status, is displayed.
263 # error message, depending on unsynced status, is displayed.
265 error = None
264 error = None
266 unsynced = False
265 unsynced = False
267 allmissing = set(outgoing.missing)
266 allmissing = set(outgoing.missing)
268 for branch, heads in headssum.iteritems():
267 for branch, heads in headssum.iteritems():
269 if heads[0] is None:
268 if heads[0] is None:
270 # Maybe we should abort if we push more than one head
269 # Maybe we should abort if we push more than one head
271 # for new branches ?
270 # for new branches ?
272 continue
271 continue
273 if heads[2]:
272 if heads[2]:
274 unsynced = True
273 unsynced = True
275 oldhs = set(heads[0])
274 oldhs = set(heads[0])
276 candidate_newhs = set(heads[1])
275 candidate_newhs = set(heads[1])
277 # add unsynced data
276 # add unsynced data
278 oldhs.update(heads[2])
277 oldhs.update(heads[2])
279 candidate_newhs.update(heads[2])
278 candidate_newhs.update(heads[2])
280 dhs = None
279 dhs = None
281 if repo.obsstore:
280 if repo.obsstore:
282 # remove future heads which are actually obsolete by another
281 # remove future heads which are actually obsolete by another
283 # pushed element:
282 # pushed element:
284 #
283 #
285 # XXX There are several cases this code does not handle properly
284 # XXX There are several cases this code does not handle properly
286 #
285 #
287 # (1) if <nh> is public, it won't be affected by obsolete marker
286 # (1) if <nh> is public, it won't be affected by obsolete marker
288 # and a new is created
287 # and a new is created
289 #
288 #
290 # (2) if the new heads have ancestors which are not obsolete and
289 # (2) if the new heads have ancestors which are not obsolete and
291 # not ancestors of any other heads we will have a new head too.
290 # not ancestors of any other heads we will have a new head too.
292 #
291 #
293 # These two cases will be easy to handle for known changesets but much
292 # These two cases will be easy to handle for known changesets but much
294 # more tricky for unsynced changes.
293 # more tricky for unsynced changes.
295 newhs = set()
294 newhs = set()
296 for nh in candidate_newhs:
295 for nh in candidate_newhs:
297 for suc in obsolete.anysuccessors(repo.obsstore, nh):
296 for suc in obsolete.anysuccessors(repo.obsstore, nh):
298 if suc != nh and suc in allmissing:
297 if suc != nh and suc in allmissing:
299 break
298 break
300 else:
299 else:
301 newhs.add(nh)
300 newhs.add(nh)
302 else:
301 else:
303 newhs = candidate_newhs
302 newhs = candidate_newhs
304 if len(newhs) > len(oldhs):
303 if len(newhs) > len(oldhs):
305 # strip updates to existing remote heads from the new heads list
304 # strip updates to existing remote heads from the new heads list
306 dhs = list(newhs - bookmarkedheads - oldhs)
305 dhs = list(newhs - bookmarkedheads - oldhs)
307 if dhs:
306 if dhs:
308 if error is None:
307 if error is None:
309 if branch not in ('default', None):
308 if branch not in ('default', None):
310 error = _("push creates new remote head %s "
309 error = _("push creates new remote head %s "
311 "on branch '%s'!") % (short(dhs[0]), branch)
310 "on branch '%s'!") % (short(dhs[0]), branch)
312 else:
311 else:
313 error = _("push creates new remote head %s!"
312 error = _("push creates new remote head %s!"
314 ) % short(dhs[0])
313 ) % short(dhs[0])
315 if heads[2]: # unsynced
314 if heads[2]: # unsynced
316 hint = _("you should pull and merge or "
315 hint = _("you should pull and merge or "
317 "use push -f to force")
316 "use push -f to force")
318 else:
317 else:
319 hint = _("did you forget to merge? "
318 hint = _("did you forget to merge? "
320 "use push -f to force")
319 "use push -f to force")
321 if branch is not None:
320 if branch is not None:
322 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
321 repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
323 for h in dhs:
322 for h in dhs:
324 repo.ui.note(_("new remote head %s\n") % short(h))
323 repo.ui.note(_("new remote head %s\n") % short(h))
325 if error:
324 if error:
326 raise util.Abort(error, hint=hint)
325 raise util.Abort(error, hint=hint)
327
326
328 # 6. Check for unsynced changes on involved branches.
327 # 6. Check for unsynced changes on involved branches.
329 if unsynced:
328 if unsynced:
330 repo.ui.warn(_("note: unsynced remote changes!\n"))
329 repo.ui.warn(_("note: unsynced remote changes!\n"))
331
330
332 def visibleheads(repo):
331 def visibleheads(repo):
333 """return the set of visible head of this repo"""
332 """return the set of visible head of this repo"""
334 # XXX we want a cache on this
333 # XXX we want a cache on this
335 sroots = repo._phasecache.phaseroots[phases.secret]
334 sroots = repo._phasecache.phaseroots[phases.secret]
336 if sroots or repo.obsstore:
335 if sroots or repo.obsstore:
337 # XXX very slow revset. storing heads or secret "boundary"
336 # XXX very slow revset. storing heads or secret "boundary"
338 # would help.
337 # would help.
339 revset = repo.set('heads(not (%ln:: + extinct()))', sroots)
338 revset = repo.set('heads(not (%ln:: + extinct()))', sroots)
340
339
341 vheads = [ctx.node() for ctx in revset]
340 vheads = [ctx.node() for ctx in revset]
342 if not vheads:
341 if not vheads:
343 vheads.append(nullid)
342 vheads.append(nullid)
344 else:
343 else:
345 vheads = repo.heads()
344 vheads = repo.heads()
346 return vheads
345 return vheads
347
346
348
347
349 def visiblebranchmap(repo):
348 def visiblebranchmap(repo):
350 """return a branchmap for the visible set"""
349 """return a branchmap for the visible set"""
351 # XXX Recomputing this data on the fly is very slow. We should build a
350 # XXX Recomputing this data on the fly is very slow. We should build a
352 # XXX cached version while computing the standard branchmap version.
351 # XXX cached version while computing the standard branchmap version.
353 sroots = repo._phasecache.phaseroots[phases.secret]
352 sroots = repo._phasecache.phaseroots[phases.secret]
354 if sroots or repo.obsstore:
353 if sroots or repo.obsstore:
355 vbranchmap = {}
354 vbranchmap = {}
356 for branch, nodes in repo.branchmap().iteritems():
355 for branch, nodes in repo.branchmap().iteritems():
357 # search for secret heads.
356 # search for secret heads.
358 for n in nodes:
357 for n in nodes:
359 if repo[n].phase() >= phases.secret:
358 if repo[n].phase() >= phases.secret:
360 nodes = None
359 nodes = None
361 break
360 break
362 # if secret heads were found we must compute them again
361 # if secret heads were found we must compute them again
363 if nodes is None:
362 if nodes is None:
364 s = repo.set('heads(branch(%s) - secret() - extinct())',
363 s = repo.set('heads(branch(%s) - secret() - extinct())',
365 branch)
364 branch)
366 nodes = [c.node() for c in s]
365 nodes = [c.node() for c in s]
367 vbranchmap[branch] = nodes
366 vbranchmap[branch] = nodes
368 else:
367 else:
369 vbranchmap = repo.branchmap()
368 vbranchmap = repo.branchmap()
370 return vbranchmap
369 return vbranchmap
General Comments 0
You need to be logged in to leave comments. Login now