# discovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import functools

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
)
from . import (
    bookmarks,
    branchmap,
    error,
    phases,
    scmutil,
    setdiscovery,
    treediscovery,
    util,
)

def findcommonincoming(repo, remote, heads=None, force=False, ancestorsof=None):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
      locally. If remote does not support getbundle, this actually is a list of
      roots of the nodes that would be incoming, to be supplied to
      changegroupsubset. No code except for pull should be relying on this fact
      any longer.
    "heads" is either the supplied heads, or else the remote's heads.
    "ancestorsof", if not None, restricts the discovery to a subset defined by
      these nodes. Changesets outside of this set won't be considered (and
      won't appear in "common").

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

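    # Servers without the 'getbundle' capability only support the older,
    # tree-based discovery protocol implemented in the treediscovery module.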
    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        knownnode = repo.changelog.hasnode # no nodemap until it is filtered
        if all(knownnode(h) for h in heads):
            return (heads, False, heads)

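    # Otherwise run the set-based discovery (setdiscovery) against the remote
    # to compute the common heads.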
    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
                                       abortwhenunrelated=not force,
                                       ancestorsof=ancestorsof)
    common, anyinc, srvheads = res
    return (list(common), anyinc, heads or list(srvheads))

class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

      missing is a list of all nodes present in local but not in remote.
      common is a list of all nodes shared between the two repos.
      excluded is the list of missing changesets that shouldn't be sent
        remotely.
      missingheads is the list of heads of missing.
      commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, repo, commonheads=None, missingheads=None,
                 missingroots=None):
        # at least one of them must not be set
        assert None in (commonheads, missingroots)
        cl = repo.changelog
        if missingheads is None:
            missingheads = cl.heads()
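        # When 'missingroots' is given, derive the common heads from the
        # parents of those roots that are not themselves part of the set
        # returned by nodesbetween().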
        if missingroots:
            discbases = []
            for n in missingroots:
                discbases.extend([p for p in cl.parents(n) if p != nullid])
            # TODO remove call to nodesbetween.
            # TODO populate attributes on outgoing instance instead of setting
            # discbases.
            csets, roots, heads = cl.nodesbetween(missingroots, missingheads)
            included = set(csets)
            missingheads = heads
            commonheads = [n for n in discbases if n not in included]
        elif not commonheads:
            commonheads = [nullid]
        self.commonheads = commonheads
        self.missingheads = missingheads
        self._revlog = cl
        self._common = None
        self._missing = None
        self.excluded = []

    def _computecommonmissing(self):
        sets = self._revlog.findcommonmissing(self.commonheads,
                                              self.missingheads)
        self._common, self._missing = sets

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing

def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force,
                                       ancestorsof=onlyheads)
    og.commonheads, _any, _hds = commoninc
    # compute outgoing
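    # Exclusion can only come from secret changesets or obsolescence markers;
    # without either, every missing changeset is outgoing.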
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = repo.filtered("served").heads()
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads

    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og

def _headssummary(pushop):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads],
                       [unsyncedheads], [discardedheads])} mapping

    - branch: the branch name,
    - remoteheads: the list of remote heads known locally
                   None if the branch is new,
    - newheads: the new remote heads (known locally) with outgoing pushed,
    - unsyncedheads: the list of remote heads unknown locally,
    - discardedheads: the list of heads made obsolete by the push.
    """
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    outgoing = pushop.outgoing
    cl = repo.changelog
    headssum = {}
    # A. Create set of branches involved in the push.
    branches = set(repo[n].branch() for n in outgoing.missing)

    with remote.commandexecutor() as e:
        remotemap = e.callcommand('branchmap', {}).result()

    newbranches = branches - set(remotemap)
    branches.difference_update(newbranches)

    # A. register remote heads
    remotebranches = set()
    for branch, heads in remotemap.iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        knownnode = cl.hasnode # do not use nodemap until it is filtered
        for h in heads:
            if knownnode(h):
                known.append(h)
            else:
                unsynced.append(h)
        headssum[branch] = (known, list(known), unsynced)

    # B. add new branch data
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            headssum[branch] = (None, [], [])

    # C drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # D. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = branchmap.branchcache((branch, heads[1])
                                   for branch, heads in headssum.iteritems()
                                   if heads[0] is not None)
    newmap.update(repo, (ctx.rev() for ctx in missingctx))
    for branch, newheads in newmap.iteritems():
        headssum[branch][1][:] = newheads
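    # Normalize the summary: sort every list and append a placeholder slot for
    # the discarded heads computed by the obsolescence post-processing below.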
    for branch, items in headssum.iteritems():
        for l in items:
            if l is not None:
                l.sort()
        headssum[branch] = items + ([],)

    # If there is no obsstore, no post processing is needed.
    if repo.obsstore:
        torev = repo.changelog.rev
        futureheads = set(torev(h) for h in outgoing.missingheads)
        futureheads |= set(torev(h) for h in outgoing.commonheads)
        allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
        for branch, heads in sorted(headssum.iteritems()):
            remoteheads, newheads, unsyncedheads, placeholder = heads
            result = _postprocessobsolete(pushop, allfuturecommon, newheads)
            headssum[branch] = (remoteheads, sorted(result[0]), unsyncedheads,
                                sorted(result[1]))
    return headssum

def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
    """Compute branchmapsummary for repo without branchmap support"""
    # 1-4b. old servers: Check for new topological heads.
    # Construct {old,new}map with branch = None (topological branch).
    # (code based on update)
    knownnode = repo.changelog.hasnode # no nodemap until it is filtered
    oldheads = sorted(h for h in remoteheads if knownnode(h))
    # all nodes in outgoing.missing are children of either:
    # - an element of oldheads
    # - another element of outgoing.missing
    # - nullrev
    # This explains why the new heads are very simple to compute.
    r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
    newheads = sorted(c.node() for c in r)
    # set some unsynced head to issue the "unsynced changes" warning
    if inc:
        unsynced = [None]
    else:
        unsynced = []
    return {None: (oldheads, newheads, unsynced, [])}

def _nowarnheads(pushop):
    # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    localbookmarks = repo._bookmarks

    with remote.commandexecutor() as e:
        remotebookmarks = e.callcommand('listkeys', {
            'namespace': 'bookmarks',
        }).result()

    bookmarkedheads = set()

    # internal config: bookmarks.pushing
    newbookmarks = [localbookmarks.expandname(b)
                    for b in pushop.ui.configlist('bookmarks', 'pushing')]
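    # Collect the nodes of bookmarks that are being pushed or validly updated
    # on the remote; heads carrying them are exempt from the new-head warning.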
    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = localbookmarks.changectx(bm), repo[rnode]
            if bookmarks.validdest(repo, rctx, lctx):
                bookmarkedheads.add(lctx.node())
        else:
            if bm in newbookmarks and bm not in remotebookmarks:
                bookmarkedheads.add(localbookmarks[bm])

    return bookmarkedheads

def checkheads(pushop):
    """Check that a push won't add any outgoing head

    Raise an Abort error and display a ui message as needed.
    """

    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    outgoing = pushop.outgoing
    remoteheads = pushop.remoteheads
    newbranch = pushop.newbranch
    inc = bool(pushop.incoming)
    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    if remote.capable('branchmap'):
        headssum = _headssummary(pushop)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    pushop.pushbranchmap = headssum
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise error.Abort(_("push creates new remote branches: %s!")
                          % branchnames,
                          hint=_("use 'hg push --new-branch' to create"
                                 " new remote branches"))

    # 2. Find heads that we need not warn about
    nowarnheads = _nowarnheads(pushop)

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    errormsg = None
    for branch, heads in sorted(headssum.iteritems()):
        remoteheads, newheads, unsyncedheads, discardedheads = heads
        # add unsynced data
        if remoteheads is None:
            oldhs = set()
        else:
            oldhs = set(remoteheads)
        oldhs.update(unsyncedheads)
        dhs = None # delta heads, the new heads on branch
        newhs = set(newheads)
        newhs.update(unsyncedheads)
        if unsyncedheads:
            if None in unsyncedheads:
                # old remote, no heads data
                heads = None
            else:
                heads = scmutil.nodesummaries(repo, unsyncedheads)
            if heads is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally\n"))
            elif branch is None:
                repo.ui.status(_("remote has heads that are "
                                 "not known locally: %s\n") % heads)
            else:
                repo.ui.status(_("remote has heads on branch '%s' that are "
                                 "not known locally: %s\n") % (branch, heads))
        if remoteheads is None:
            if len(newhs) > 1:
                dhs = list(newhs)
                if errormsg is None:
                    errormsg = (_("push creates new branch '%s' "
                                  "with multiple heads") % (branch))
                    hint = _("merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
        elif len(newhs) > len(oldhs):
            # remove bookmarked or existing remote heads from the new heads list
            dhs = sorted(newhs - nowarnheads - oldhs)
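        # any head left in dhs at this point would newly appear on the remote;
        # report it and prepare the abort message and hint below.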
        if dhs:
            if errormsg is None:
                if branch not in ('default', None):
                    errormsg = _("push creates new remote head %s "
                                 "on branch '%s'!") % (short(dhs[0]), branch)
                elif repo[dhs[0]].bookmarks():
                    errormsg = _("push creates new remote head %s "
                                 "with bookmark '%s'!") % (
                                 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                else:
                    errormsg = _("push creates new remote head %s!"
                                 ) % short(dhs[0])
                if unsyncedheads:
                    hint = _("pull and merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
                else:
                    hint = _("merge or"
                             " see 'hg help push' for details about"
                             " pushing new heads")
            if branch is None:
                repo.ui.note(_("new remote heads:\n"))
            else:
                repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
            for h in dhs:
                repo.ui.note((" %s\n") % short(h))

    if errormsg:
        raise error.Abort(errormsg, hint=hint)

def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
    """post process the list of new heads with obsolescence information

    Exists as a sub-function to contain the complexity and allow extensions to
    experiment with smarter logic.

    Returns (newheads, discarded_heads) tuple
    """
    # known issues
    #
    # * We "silently" skip processing on all changesets unknown locally
    #
    # * if <nh> is public on the remote, it won't be affected by obsolete
    #   markers and a new head is created

    # define various utilities and containers
    repo = pushop.repo
    unfi = repo.unfiltered()
    tonode = unfi.changelog.node
    torev = unfi.changelog.nodemap.get
    public = phases.public
    getphase = unfi._phasecache.phase
    ispublic = (lambda r: getphase(unfi, r) == public)
    ispushed = (lambda n: torev(n) in futurecommon)
    hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
    successorsmarkers = unfi.obsstore.successors
    newhs = set() # final set of new heads
    discarded = set() # new head of fully replaced branch
    localcandidate = set() # candidate heads known locally
    unknownheads = set() # candidate heads unknown locally
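    # Split the candidate heads between those known locally and those only
    # known remotely; only the local ones can be checked against the local
    # obsolescence markers.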
    for h in candidate_newhs:
        if h in unfi:
            localcandidate.add(h)
        else:
            if successorsmarkers.get(h) is not None:
                msg = ('checkheads: remote head unknown locally has'
                       ' local marker: %s\n')
                repo.ui.debug(msg % hex(h))
            unknownheads.add(h)

    # fast path the simple case
    if len(localcandidate) == 1:
        return unknownheads | set(candidate_newhs), set()

    # actually process branch replacement
    while localcandidate:
        nh = localcandidate.pop()
        # run this check early to skip the evaluation of the whole branch
        if (torev(nh) in futurecommon or ispublic(torev(nh))):
            newhs.add(nh)
            continue
        # Get all revs/nodes on the branch exclusive to this head
        # (already filtered heads are "ignored")
        branchrevs = unfi.revs('only(%n, (%ln+%ln))',
                               nh, localcandidate, newhs)
        branchnodes = [tonode(r) for r in branchrevs]

        # The branch won't be hidden on the remote if
        # * any part of it is public,
        # * any part of it is considered part of the result by previous logic,
        # * if we have no markers to push to obsolete it.
        if (any(ispublic(r) for r in branchrevs)
            or any(torev(n) in futurecommon for n in branchnodes)
            or any(not hasoutmarker(n) for n in branchnodes)):
            newhs.add(nh)
        else:
            # note: there is a corner case if there is a merge in the branch.
            # We might end up with -more- heads. However, these heads are not
            # "added" by the push, but rather by the "removal" on the remote,
            # so it seems okay to ignore them.
            discarded.add(nh)
    newhs |= unknownheads
    return newhs, discarded

def pushingmarkerfor(obsstore, ispushed, node):
    """true if some markers are to be pushed for node

    We cannot just look in to the pushed obsmarkers from the pushop because
    discovery might have filtered relevant markers. In addition listing all
    markers relevant to all changesets in the pushed set would be too expensive
    (O(len(repo)))

    (note: there are cache opportunities in this function, but they would
    require a two dimensional stack.)
    """
    successorsmarkers = obsstore.successors
    stack = [node]
    seen = set(stack)
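    # Walk successor markers (and, for prune markers, parent nodes) starting
    # from 'node'; reaching any changeset that is part of the push means the
    # relevant markers will be exchanged along with it.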
    while stack:
        current = stack.pop()
        if ispushed(current):
            return True
        markers = successorsmarkers.get(current, ())
        # markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
        for m in markers:
            nexts = m[1] # successors
            if not nexts: # this is a prune marker
                nexts = m[5] or () # parents
            for n in nexts:
                if n not in seen:
                    seen.add(n)
                    stack.append(n)
    return False