# exchange.py - utility to exchange data between repositories.
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import sys
from i18n import _
from node import hex, nullid
import cStringIO
import errno
import util, scmutil, changegroup, base85
import discovery, phases, obsolete, bookmarks, bundle2


class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push-related state and very common operations.

    A new one should be created at the beginning of each push and discarded
    afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # Integer version of the push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.ret = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote heads before the push
        self.remoteheads = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # set of all heads common after changeset bundle push
        self.commonheads = None

def push(repo, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch)
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise util.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    locallock = None
    try:
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError, err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if _pushcheckoutgoing(pushop):
                _pushchangeset(pushop)
            _pushcomputecommonheads(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
        finally:
            if lock is not None:
                lock.release()
    finally:
        if locallock is not None:
            locallock.release()

    _pushbookmark(pushop)
    return pushop.ret
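
# Illustrative note (not part of the original module): a caller such as the
# push command is expected to drive this function roughly as in the hedged
# sketch below; the names "other" and "revs" are assumed here purely for
# illustration.
#
#     other = hg.peer(repo, opts, dest)          # remote peer to push to
#     r = exchange.push(repo, other, opts.get('force'), revs=revs,
#                       newbranch=opts.get('new_branch'))
#     # r is None (nothing to push), 0 (HTTP error), 1, or the value
#     # returned by the remote's addchangegroup().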

def _pushdiscovery(pushop):
    # discovery
    unfi = pushop.repo.unfiltered()
    fci = discovery.findcommonincoming
    commoninc = fci(unfi, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc

def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are assigned here because of the 80 char limit
            mso = _("push includes obsolete changeset: %s!")
            mst = "push includes %s changeset: %s!"
            # plain versions for the i18n tool to detect them
            _("push includes unstable changeset: %s!")
            _("push includes bumped changeset: %s!")
            _("push includes divergent changeset: %s!")
            # If we are going to push and there is at least one obsolete or
            # unstable changeset in missing, then at least one of the
            # missing heads will be obsolete or unstable. So checking heads
            # only is ok.
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise util.Abort(mso % ctx)
                elif ctx.troubled():
                    raise util.Abort(_(mst)
                                     % (ctx.troubles()[0],
                                        ctx))
        newbm = pushop.ui.configlist('bookmarks', 'pushing')
        discovery.checkheads(unfi, pushop.remote, outgoing,
                             pushop.remoteheads,
                             pushop.newbranch,
                             bool(pushop.incoming),
                             newbm)
    return True

def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.bundle10(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
                                        bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.ret = pushop.remote.unbundle(cg, remoteheads,
                                            'push')
    else:
        # we return an integer indicating remote head count
        # change
        pushop.ret = pushop.remote.addchangegroup(cg, 'push',
                                                  pushop.repo.url())

def _pushcomputecommonheads(pushop):
    unfi = pushop.repo.unfiltered()
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
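
# Worked example (illustrative, not part of the original module): with a
# linear history A-B-C-D where the remote already has A and B
# (commonheads == [B]) and a partial push of D failed, the intent is
# cheads == heads(::D and ::B) == [B]. No element of pushop.revs ([D]) lies
# inside ::B, so the first step contributes nothing, and the revset
# '%ln and parents(roots(%ln))' contributes B, because B is the parent of C,
# the root of the missing set {C, D}.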

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    unfi = pushop.repo.unfiltered()
    cheads = pushop.commonheads
    if pushop.ret:
        # push succeeded, synchronize target of the push
        cheads = pushop.outgoing.missingheads
    elif pushop.revs is None:
        # All-out push failed. synchronize all common
        cheads = pushop.outgoing.commonheads
    else:
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = set(pushop.outgoing.common)
        nm = pushop.repo.changelog.nodemap
        cheads = [node for node in pushop.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          pushop.outgoing.commonheads,
                          pushop.outgoing.missing)
        cheads.extend(c.node() for c in revset)
    pushop.commonheads = cheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.ret is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only repo
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        # Get the list of all revs that are draft on the remote but public
        # here.
        # XXX Beware that the revset breaks if droots is not strictly
        # XXX roots; we may want to ensure it is, but it is costly
        outdated = unfi.set('heads((%ln::%ln) and public())',
                            droots, cheads)
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.locallocked:
        phases.advanceboundary(pushop.repo, phase, nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    pushop.ui.debug('try to push obsolete markers to remote\n')
    repo = pushop.repo
    remote = pushop.remote
    if (obsolete._enabled and repo.obsstore and
        'obsolete' in remote.listkeys('namespaces')):
        rslts = []
        remotedata = repo.listkeys('obsolete')
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    revnums = map(repo.changelog.rev, pushop.revs or [])
    ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
     ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
                           srchex=hex)
    for b, scid, dcid in advsrc:
        if ancestors and repo[scid].rev() not in ancestors:
            continue
        if remote.pushkey('bookmarks', b, dcid, scid):
            ui.status(_("updating bookmark %s\n") % b)
        else:
            ui.warn(_('updating bookmark %s failed!\n') % b)


class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # do we force pull?
        self.force = force
        # the name of the pull transaction
        self._trname = 'pull\n' + util.hidepassword(remote.url())
        # hold the transaction once created
        self._tr = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of steps remaining to do (related to future bundle2 usage)
        self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    def gettransaction(self):
        """get appropriate pull transaction, creating it if needed"""
        if self._tr is None:
            self._tr = self.repo.transaction(self._trname)
        return self._tr

    def closetransaction(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def releasetransaction(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
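
    # Illustrative note (not part of the original class): the transaction
    # helpers above are meant to be driven by the pull steps roughly as in
    # this hedged sketch, so a transaction is only opened once data is
    # actually about to be written:
    #
    #     try:
    #         tr = pullop.gettransaction()   # lazily opened on first need
    #         ...                            # write incoming data
    #         pullop.closetransaction()      # commit if one was opened
    #     finally:
    #         pullop.releasetransaction()    # roll back if still pending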

def pull(repo, remote, heads=None, force=False):
    pullop = pulloperation(repo, remote, heads, force)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise util.Abort(msg)

    lock = pullop.repo.lock()
    try:
        _pulldiscovery(pullop)
        if pullop.remote.capable('bundle2'):
            _pullbundle2(pullop)
        if 'changegroup' in pullop.todosteps:
            _pullchangeset(pullop)
        if 'phases' in pullop.todosteps:
            _pullphase(pullop)
        if 'obsmarkers' in pullop.todosteps:
            _pullobsolete(pullop)
        pullop.closetransaction()
    finally:
        pullop.releasetransaction()
        lock.release()

    return pullop.cgresult
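
# Illustrative note (not part of the original module): a caller such as the
# pull command is expected to drive this function roughly as in the hedged
# sketch below; "other" and "revs" are assumed names used for illustration
# only.
#
#     other = hg.peer(repo, opts, source)
#     r = exchange.pull(repo, other, heads=revs, force=opts.get('force'))
#     # r is the addchangegroup()-style return code stored in
#     # pullop.cgresult (0 means no changes were found).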

def _pulldiscovery(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    pullop.common, pullop.fetch, pullop.rheads = tmp

def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroups."""
    kwargs = {'bundlecaps': set(['HG20'])}
    # pulling changegroup
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        kwargs['common'] = pullop.common
        kwargs['heads'] = pullop.heads or pullop.rheads
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if kwargs.keys() == ['format']:
        return # nothing to pull
    bundle = pullop.remote.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except KeyError, exc:
        raise util.Abort('missing support for %s' % exc)
    assert len(op.records['changegroup']) == 1
    pullop.cgresult = op.records['changegroup'][0]['return']
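
# Illustrative note (not part of the original module): when there is
# something to fetch, the getbundle call above is effectively made roughly
# as in this hedged sketch:
#
#     bundle = pullop.remote.getbundle('pull',
#                                      bundlecaps=set(['HG20']),
#                                      common=pullop.common,
#                                      heads=pullop.heads or pullop.rheads)
#     # The reply is an HG20 stream; bundle2.processbundle() applies each
#     # part and records the changegroup return code.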

def _pullchangeset(pullop):
    """pull changesets from the remote into the local repo"""
    # We delay opening the transaction as late as possible so we don't open
    # a transaction for nothing, and so we don't break a future useful
    # rollback call
    pullop.todosteps.remove('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise util.Abort(_("partial pull cannot be done because "
                           "other repository doesn't support "
                           "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
                                                 pullop.remote.url())
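
# Illustrative note (not part of the original module): the branches above
# form a capability-based fallback order for fetching the changegroup:
# 'getbundle' when the remote supports it, plain changegroup() for a full
# pull from an old remote, and changegroupsubset() for a partial pull
# (aborting if even that is unsupported).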

def _pullphase(pullop):
    # Get remote phases data from remote
    pullop.todosteps.remove('phases')
    remotephases = pullop.remote.listkeys('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        phases.advanceboundary(pullop.repo, phases.public, pheads)
        phases.advanceboundary(pullop.repo, phases.draft,
                               pullop.pulledsubset)
    else:
        # Remote is old or publishing; all common changesets
        # should be seen as public
        phases.advanceboundary(pullop.repo, phases.public,
                               pullop.pulledsubset)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the
    calling code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    pullop.todosteps.remove('obsmarkers')
    tr = None
    if obsolete._enabled:
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    pullop.repo.obsstore.mergemarkers(tr, data)
            pullop.repo.invalidatevolatilesets()
    return tr

def getbundle(repo, source, heads=None, common=None, bundlecaps=None):
    """return a full bundle (with potentially multiple kinds of parts)

    Could be a bundle HG10 or a bundle HG20 depending on the bundlecaps
    passed. For now, the bundle can contain only changegroup, but this will
    change when more part types become available for bundle2.

    This is different from changegroup.getbundle, which only returns an HG10
    changegroup bundle. They may eventually get reunited in the future when
    we have a clearer idea of the API we want to use to query different data.

    The implementation is at a very early stage and will get massive rework
    when the API of bundle is refined.
    """
    # build bundle here.
    cg = changegroup.getbundle(repo, source, heads=heads,
                               common=common, bundlecaps=bundlecaps)
    if bundlecaps is None or 'HG20' not in bundlecaps:
        return cg
    # very crude first implementation,
    # the bundle API will change and the generation will be done lazily.
    bundler = bundle2.bundle20(repo.ui)
    tempname = changegroup.writebundle(cg, None, 'HG10UN')
    data = open(tempname).read()
    part = bundle2.part('changegroup', data=data)
    bundler.addpart(part)
    temp = cStringIO.StringIO()
    for c in bundler.getchunks():
        temp.write(c)
    temp.seek(0)
    return bundle2.unbundle20(repo.ui, temp)
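
# Illustrative note (not part of the original module): server-side code is
# expected to call this roughly as in the hedged sketch below. Without
# 'HG20' in bundlecaps the plain HG10 changegroup is returned unchanged;
# with it, the changegroup is rewrapped as a single 'changegroup' part of a
# bundle2 (HG20) stream.
#
#     bundle = exchange.getbundle(repo, 'serve', heads=heads, common=common,
#                                 bundlecaps=set(['HG20']))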

class PushRaced(RuntimeError):
    """An exception raised during unbundling that indicates a push race"""

def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = util.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise PushRaced('repository changed while %s - '
                        'please try again' % context)
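
# Illustrative note (not part of the original module): a caller that cannot
# pass the remote heads verbatim can use the hashed form accepted above,
# built roughly as in this hedged sketch (mirroring the digest computed in
# check_heads):
#
#     remoteheads = ['hashed', util.sha1(''.join(sorted(heads))).digest()]
#     check_heads(repo, remoteheads, 'uploading changes')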

def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    lock = repo.lock()
    try:
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        r = changegroup.addchangegroup(repo, cg, source, url)
    finally:
        lock.release()
    return r
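
# Illustrative note (not part of the original module): per the changeset this
# file comes from ("localrepo: add unbundle support"), the local peer's
# unbundle() is expected to delegate here, roughly as in this hedged sketch:
#
#     class localpeer(...):
#         def unbundle(self, cg, heads, url):
#             return exchange.unbundle(self._repo, cg, heads, 'push', url)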