# discovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import nullid, short
from i18n import _
import util, setdiscovery, treediscovery, phases, obsolete, bookmarks

def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
      locally. If remote does not support getbundle, this actually is a list of
      roots of the nodes that would be incoming, to be supplied to
      changegroupsubset. No code except for pull should be relying on this fact
      any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        allknown = True
        nm = repo.changelog.nodemap
        for h in heads:
            if nm.get(h) is None:
                allknown = False
                break
        if allknown:
            return (heads, False, heads)

    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                       abortwhenunrelated=not force)
    common, anyinc, srvheads = res
    return (list(common), anyinc, heads or list(srvheads))

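# Illustrative usage sketch (not part of the original module): pull-side code
# might use findcommonincoming roughly like this. The names `repo`, `remote`
# and `ui` stand for a local repository, a peer and a ui object; they are
# assumptions for the example, not values defined here.
#
#   common, anyinc, rheads = findcommonincoming(repo, remote, force=False)
#   if not anyinc:
#       ui.status("no changes found\n")
#   else:
#       pass  # request the changesets above `common` from the remote
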
class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changesets that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, revlog, commonheads, missingheads):
        self.commonheads = commonheads
        self.missingheads = missingheads
        self._revlog = revlog
        self._common = None
        self._missing = None
        self.excluded = []

    def _computecommonmissing(self):
        sets = self._revlog.findcommonmissing(self.commonheads,
                                              self.missingheads)
        self._common, self._missing = sets

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing

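# Illustrative sketch (an addition, not original code): because `common` and
# `missing` are property caches, an outgoing object can be built cheaply from
# heads alone; the full node lists are only computed when first accessed.
# `commonheads` and `missingheads` below are assumed lists of node ids.
#
#   og = outgoing(repo.changelog, commonheads, missingheads)
#   nodes = og.missing   # triggers findcommonmissing() on first access
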
def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo.changelog, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    og.commonheads, _any, _hds = commoninc

    # compute outgoing
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = visibleheads(repo)
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads

    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og

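# Illustrative usage sketch (not part of the original module): push-like code
# would typically reuse a previously computed commoninc tuple so discovery is
# only run once. `repo` and `other` are assumed repository and peer objects.
#
#   commoninc = findcommonincoming(repo, other, force=False)
#   og = findcommonoutgoing(repo, other, commoninc=commoninc)
#   if not og.missing:
#       pass  # nothing to push
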
def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. Create the set of branches involved in the push.
    branches = set(repo[n].branch() for n in outgoing.missing)
    remotemap = remote.branchmap()
    newbranches = branches - set(remotemap)
    branches.difference_update(newbranches)

    # B. Register remote heads.
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        for h in heads:
            if h in cl.nodemap:
                known.append(h)
            else:
                unsynced.append(h)
        headssum[branch] = (known, list(known), unsynced)

    # C. Add new branch data.
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            headssum[branch] = (None, [], [])

    # D. Drop data about untouched branches.
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # E. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = dict((branch, heads[1]) for branch, heads in headssum.iteritems()
                  if heads[0] is not None)
    repo._updatebranchcache(newmap, missingctx)
    for branch, newheads in newmap.iteritems():
        headssum[branch][1][:] = newheads
    return headssum

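# Illustrative sketch of the returned structure (an example, not data produced
# by this module): for a push touching only the 'default' branch, the summary
# could look like the following, where each element is a binary node id.
#
#   {'default': ([oldhead1, oldhead2],   # remote heads known locally
#                [oldhead1, newhead],    # heads after the push
#                [])}                    # remote heads unknown locally
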
def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
    """Compute the heads summary for a repo without branchmap support"""
    cl = repo.changelog
    # 1-4b. old servers: Check for new topological heads.
    # Construct {old,new}map with branch = None (topological branch).
    # (code based on _updatebranchcache)
    oldheads = set(h for h in remoteheads if h in cl.nodemap)
    # all nodes in outgoing.missing are children of either:
    # - an element of oldheads
    # - another element of outgoing.missing
    # - nullrev
    # This explains why the new heads are very simple to compute.
    r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
    newheads = list(c.node() for c in r)
    unsynced = inc and set([None]) or set()
    return {None: (oldheads, newheads, unsynced)}

def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
    """Check that a push won't add any outgoing head

    Raise an Abort error and display a ui message as needed.
    """
    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise util.Abort(_("push creates new remote branches: %s!")
                         % branchnames,
                         hint=_("use 'hg push --new-branch' to create"
                                " new remote branches"))

    # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
    localbookmarks = repo._bookmarks
    remotebookmarks = remote.listkeys('bookmarks')
    bookmarkedheads = set()
    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = repo[bm], repo[rnode]
            if bookmarks.validdest(repo, rctx, lctx):
                bookmarkedheads.add(lctx.node())

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    error = None
    unsynced = False
    allmissing = set(outgoing.missing)
    allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
    allfuturecommon.update(allmissing)
    for branch, heads in headssum.iteritems():
        if heads[0] is None:
            # Maybe we should abort if we push more than one head
            # for new branches?
            continue
        candidate_newhs = set(heads[1])
        # add unsynced data
        oldhs = set(heads[0])
        oldhs.update(heads[2])
        candidate_newhs.update(heads[2])
        dhs = None
        discardedheads = set()
        if repo.obsstore:
            # remove future heads which are actually obsoleted by another
            # pushed element:
            #
            # XXX as above, there are several cases this code does not handle
            # XXX properly
            #
            # (1) if <nh> is public, it won't be affected by obsolete markers
            #     and a new head is created
            #
            # (2) if the new heads have ancestors which are not obsolete and
            #     not ancestors of any other heads we will have a new head too.
            #
            # These two cases will be easy to handle for known changesets but
            # much more tricky for unsynced changes.
            newhs = set()
            for nh in candidate_newhs:
                if nh in repo and repo[nh].phase() <= phases.public:
                    newhs.add(nh)
                else:
                    for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
                        if suc != nh and suc in allfuturecommon:
                            discardedheads.add(nh)
                            break
                    else:
                        newhs.add(nh)
        else:
            newhs = candidate_newhs
        if [h for h in heads[2] if h not in discardedheads]:
            unsynced = True
        if len(newhs) > len(oldhs):
            # strip updates to existing remote heads from the new heads list
            dhs = list(newhs - bookmarkedheads - oldhs)
        if dhs:
            if error is None:
                if branch not in ('default', None):
                    error = _("push creates new remote head %s "
                              "on branch '%s'!") % (short(dhs[0]), branch)
                else:
                    error = _("push creates new remote head %s!"
                              ) % short(dhs[0])
                if heads[2]: # unsynced
                    hint = _("you should pull and merge or "
                             "use push -f to force")
                else:
                    hint = _("did you forget to merge? "
                             "use push -f to force")
            if branch is not None:
                repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
            for h in dhs:
                repo.ui.note(_("new remote head %s\n") % short(h))
    if error:
        raise util.Abort(error, hint=hint)

    # 4. Check for unsynced changes on involved branches.
    if unsynced:
        repo.ui.warn(_("note: unsynced remote changes!\n"))

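# Illustrative usage sketch (not original code): push-side code would call
# checkheads after discovery and before sending a changegroup, letting the
# util.Abort propagate to the user when a new head or branch would be created.
# The names `revs`, `remoteheads`, `newbranch` and `inc` are assumed inputs
# from the surrounding push logic, not values defined in this module.
#
#   og = findcommonoutgoing(repo, remote, onlyheads=revs, force=force)
#   checkheads(repo, remote, og, remoteheads, newbranch=newbranch, inc=inc)
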
def visibleheads(repo):
    """return the set of visible heads of this repo"""
    # XXX we want a cache on this
    sroots = repo._phasecache.phaseroots[phases.secret]
    if sroots or repo.obsstore:
        # XXX very slow revset. storing heads or secret "boundary"
        # would help.
        revset = repo.set('heads(not (%ln:: + extinct()))', sroots)
        vheads = [ctx.node() for ctx in revset]
        if not vheads:
            vheads.append(nullid)
    else:
        vheads = repo.heads()
    return vheads

def visiblebranchmap(repo):
    """return a branchmap for the visible set"""
    # XXX Recomputing this data on the fly is very slow. We should build a
    # XXX cached version while computing the standard branchmap version.
    sroots = repo._phasecache.phaseroots[phases.secret]
    if sroots or repo.obsstore:
        vbranchmap = {}
        for branch, nodes in repo.branchmap().iteritems():
            # search for secret heads.
            for n in nodes:
                if repo[n].phase() >= phases.secret:
                    nodes = None
                    break
            # if secret heads were found we must compute them again
            if nodes is None:
                s = repo.set('heads(branch(%s) - secret() - extinct())',
                             branch)
                nodes = [c.node() for c in s]
            vbranchmap[branch] = nodes
    else:
        vbranchmap = repo.branchmap()
    return vbranchmap
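
# Illustrative sketch (an example, not output produced by this module): the
# mapping returned by visiblebranchmap has the same shape as repo.branchmap(),
# e.g. {'default': [node1, node2], 'stable': [node3]} with binary node ids,
# but with heads that are secret or extinct filtered out.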