##// END OF EJS Templates
checkheads: extract bookmark computation from the branch loop...
Pierre-Yves David -
r17212:246131d6 default
parent child Browse files
Show More
@@ -1,343 +1,345 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases
10 import util, setdiscovery, treediscovery, phases
11
11
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    # old servers without getbundle support need the slower tree-walking
    # discovery protocol
    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        # shortcut: when every requested head already exists locally there
        # is nothing to discover at all
        nodemap = repo.changelog.nodemap
        unknown = [h for h in heads if nodemap.get(h) is None]
        if not unknown:
            return (heads, False, heads)

    common, anyinc, srvheads = setdiscovery.findcommonheads(
        repo.ui, repo, remote, abortwhenunrelated=not force)
    return (list(common), anyinc, heads or list(srvheads))
48
48
class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changeset that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, revlog, commonheads, missingheads):
        # head lists delimiting the common and missing sets
        self.commonheads = commonheads
        self.missingheads = missingheads
        # revlog used to lazily expand the heads into full sets
        self._revlog = revlog
        # full sets, None until computed by _computecommonmissing()
        self._common = None
        self._missing = None
        # missing changesets that must not be sent remotely
        self.excluded = []

    def _computecommonmissing(self):
        # derive both full sets from the two head lists in one pass
        self._common, self._missing = self._revlog.findcommonmissing(
            self.commonheads, self.missingheads)

    @util.propertycache
    def common(self):
        # compute lazily on first access, then cache via propertycache
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        # compute lazily on first access, then cache via propertycache
        if self._missing is None:
            self._computecommonmissing()
        return self._missing
88
88
def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # start from an empty outgoing object and fill it in below
    og = outgoing(repo.changelog, None, None)

    # run incoming discovery unless the caller handed us its result
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    og.commonheads, _any, _hds = commoninc

    # compute the outgoing set
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        # neither secret changesets nor obsolescence markers exist, so
        # every local head (or the requested subset) may be sent
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = visibleheads(repo)
        # extinct changesets are silently ignored
        og.excluded = [ctx.node() for ctx in repo.set('secret()')]
    else:
        # compute common and missing while filtering out secret changesets
        csets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = csets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.extinct():
                # extinct changesets are silently ignored
                continue
            if ctx.phase() >= phases.secret:
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            # nothing was filtered out: the requested heads still apply
            og.missingheads = onlyheads
        else:
            # some changesets were excluded, recompute the missing heads
            og.missingheads = phases.newheads(repo, onlyheads, excluded)
    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads
                           if h not in commonheads]

    return og
151
151
def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. register remote heads, split between those known locally and
    #    those not yet pulled ("unsynced").
    #    (the remote branchmap is fetched only once here; the previous
    #    version issued a second, redundant wire request and built an
    #    unused 'newbranches' set from it)
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        for h in heads:
            if h in cl.nodemap:
                known.append(h)
            else:
                unsynced.append(h)
        # the second slot starts as a copy of the current remote heads and
        # is rewritten in step D to reflect the post-push state
        headssum[branch] = (known, list(known), unsynced)
    # B. add new branch data
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            # remoteheads of None marks the branch as new on the remote
            headssum[branch] = (None, [], [])

    # C. drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # D. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = dict((branch, heads[1]) for branch, heads in headssum.iteritems()
                  if heads[0] is not None)
    repo._updatebranchcache(newmap, missingctx)
    for branch, newheads in newmap.iteritems():
        headssum[branch][1][:] = newheads
    return headssum
204
204
205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
206 """Compute branchmapsummary for repo without branchmap support"""
206 """Compute branchmapsummary for repo without branchmap support"""
207
207
208 cl = repo.changelog
208 cl = repo.changelog
209 # 1-4b. old servers: Check for new topological heads.
209 # 1-4b. old servers: Check for new topological heads.
210 # Construct {old,new}map with branch = None (topological branch).
210 # Construct {old,new}map with branch = None (topological branch).
211 # (code based on _updatebranchcache)
211 # (code based on _updatebranchcache)
212 oldheads = set(h for h in remoteheads if h in cl.nodemap)
212 oldheads = set(h for h in remoteheads if h in cl.nodemap)
213 # all nodes in outgoing.missing are children of either:
213 # all nodes in outgoing.missing are children of either:
214 # - an element of oldheads
214 # - an element of oldheads
215 # - another element of outgoing.missing
215 # - another element of outgoing.missing
216 # - nullrev
216 # - nullrev
217 # This explains why the new head are very simple to compute.
217 # This explains why the new head are very simple to compute.
218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
219 newheads = list(c.node() for c in r)
219 newheads = list(c.node() for c in r)
220 unsynced = inc and set([None]) or set()
220 unsynced = inc and set([None]) or set()
221 return {None: (oldheads, newheads, unsynced)}
221 return {None: (oldheads, newheads, unsynced)}
222
222
def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
    """Check that a push won't add any outgoing head

    raise Abort error and display ui message as needed.
    """
    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    # Summarize per-branch remote heads before/after the push; servers
    # without branchmap support get a single topological summary keyed
    # on None instead.
    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    # entries with remoteheads of None denote branches new to the remote
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise util.Abort(_("push creates new remote branches: %s!")
                           % branchnames,
                         hint=_("use 'hg push --new-branch' to create"
                                " new remote branches"))

    # 2. Compute the set of bookmarked local heads. They are subtracted
    #    from the new-head candidates below, so a head that merely moves
    #    an existing remote bookmark forward is not reported as new.
    localbookmarks = repo._bookmarks
    remotebookmarks = remote.listkeys('bookmarks')
    bookmarkedheads = set()
    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = repo[bm], repo[rnode]
            # only count it when the remote bookmark is an ancestor of the
            # local one (i.e. the push fast-forwards the bookmark)
            if rctx == lctx.ancestor(rctx):
                bookmarkedheads.add(lctx.node())

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    error = None
    unsynced = False
    for branch, heads in headssum.iteritems():
        if heads[0] is None:
            # Maybe we should abort if we push more than one head
            # for new branches ?
            continue
        if heads[2]:
            # some remote heads of this branch are unknown locally
            unsynced = True
        oldhs = set(heads[0])
        newhs = set(heads[1])
        dhs = None
        if len(newhs) > len(oldhs):
            # strip updates to existing remote heads from the new heads list
            dhs = list(newhs - bookmarkedheads - oldhs)
        if dhs:
            if error is None:
                # only the first offending branch sets the error/hint pair
                # that is raised after the loop
                if branch not in ('default', None):
                    error = _("push creates new remote head %s "
                              "on branch '%s'!") % (short(dhs[0]), branch)
                else:
                    error = _("push creates new remote head %s!"
                             ) % short(dhs[0])
                if heads[2]: # unsynced
                    hint = _("you should pull and merge or "
                             "use push -f to force")
                else:
                    hint = _("did you forget to merge? "
                             "use push -f to force")
            if branch is not None:
                repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
            for h in dhs:
                repo.ui.note(_("new remote head %s\n") % short(h))
    if error:
        raise util.Abort(error, hint=hint)

    # 4. Check for unsynced changes on involved branches.
    if unsynced:
        repo.ui.warn(_("note: unsynced remote changes!\n"))
304
306
def visibleheads(repo):
    """return the set of visible head of this repo"""
    # XXX we want a cache on this
    sroots = repo._phasecache.phaseroots[phases.secret]
    if not (sroots or repo.obsstore):
        # nothing is hidden: the plain repository heads are all visible
        return repo.heads()
    # XXX very slow revset. storing heads or secret "boundary"
    # would help.
    revset = repo.set('heads(not (%ln:: + extinct()))', sroots)

    vheads = [ctx.node() for ctx in revset]
    if not vheads:
        # everything is hidden: fall back to the null node
        vheads.append(nullid)
    return vheads
320
322
321
323
def visiblebranchmap(repo):
    """return a branchmap for the visible set"""
    # XXX Recomputing this data on the fly is very slow. We should build a
    # XXX cached version while computing the standard branchmap version.
    sroots = repo._phasecache.phaseroots[phases.secret]
    if not (sroots or repo.obsstore):
        # nothing is hidden: the plain branchmap is valid as-is
        return repo.branchmap()
    vbranchmap = {}
    for branch, nodes in repo.branchmap().iteritems():
        # detect heads that must stay hidden (secret phase)
        hassecret = False
        for n in nodes:
            if repo[n].phase() >= phases.secret:
                hassecret = True
                break
        if hassecret:
            # secret heads were found: recompute this branch's heads
            # without the secret and extinct changesets
            s = repo.set('heads(branch(%s) - secret() - extinct())',
                         branch)
            nodes = [c.node() for c in s]
        vbranchmap[branch] = nodes
    return vbranchmap
General Comments 0
You need to be logged in to leave comments. Login now