exchange.py
3108 lines
| 103.2 KiB
| text/x-python
|
PythonLexer
/ mercurial / exchange.py
Mads Kiilerich
|
r21024 | # exchange.py - utility to exchange data between repos. | ||
Pierre-Yves David
|
r20345 | # | ||
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
Gregory Szorc
|
r27523 | from __future__ import absolute_import | ||
Boris Feld
|
r34323 | import collections | ||
Gregory Szorc
|
r27523 | |||
from .i18n import _ | ||||
from .node import ( | ||||
hex, | ||||
nullid, | ||||
Gregory Szorc
|
r38827 | nullrev, | ||
Gregory Szorc
|
r27523 | ) | ||
Augie Fackler
|
r43346 | from .thirdparty import attr | ||
Gregory Szorc
|
r27523 | from . import ( | ||
bookmarks as bookmod, | ||||
bundle2, | ||||
changegroup, | ||||
discovery, | ||||
error, | ||||
Gregory Szorc
|
r39665 | exchangev2, | ||
Gregory Szorc
|
r27523 | lock as lockmod, | ||
Pulkit Goyal
|
r35348 | logexchange, | ||
Gregory Szorc
|
r38826 | narrowspec, | ||
Gregory Szorc
|
r27523 | obsolete, | ||
Denis Laxalde
|
r43858 | obsutil, | ||
Gregory Szorc
|
r27523 | phases, | ||
pushkey, | ||||
Pulkit Goyal
|
r32896 | pycompat, | ||
Gregory Szorc
|
r27523 | scmutil, | ||
sslutil, | ||||
streamclone, | ||||
url as urlmod, | ||||
util, | ||||
Pulkit Goyal
|
r40527 | wireprototypes, | ||
Gregory Szorc
|
r27523 | ) | ||
Augie Fackler
|
r43346 | from .interfaces import repository | ||
Augie Fackler
|
r44517 | from .utils import ( | ||
hashutil, | ||||
stringutil, | ||||
) | ||||
Pierre-Yves David
|
r20345 | |||
timeless
|
r28883 | urlerr = util.urlerr | ||
urlreq = util.urlreq | ||||
Augie Fackler
|
r43347 | _NARROWACL_SECTION = b'narrowacl' | ||
Gregory Szorc
|
r38825 | |||
Gregory Szorc
|
r26640 | # Maps bundle version human names to changegroup versions. | ||
Augie Fackler
|
r43346 | _bundlespeccgversions = { | ||
Augie Fackler
|
r43347 | b'v1': b'01', | ||
b'v2': b'02', | ||||
b'packed1': b's1', | ||||
b'bundle2': b'02', # legacy | ||||
Augie Fackler
|
r43346 | } | ||
Gregory Szorc
|
r26640 | |||
Boris Feld
|
r37182 | # Maps bundle version with content opts to choose which part to bundle | ||
_bundlespeccontentopts = { | ||||
Augie Fackler
|
r43347 | b'v1': { | ||
b'changegroup': True, | ||||
b'cg.version': b'01', | ||||
b'obsolescence': False, | ||||
b'phases': False, | ||||
b'tagsfnodescache': False, | ||||
b'revbranchcache': False, | ||||
Boris Feld
|
r37182 | }, | ||
Augie Fackler
|
r43347 | b'v2': { | ||
b'changegroup': True, | ||||
b'cg.version': b'02', | ||||
b'obsolescence': False, | ||||
b'phases': False, | ||||
b'tagsfnodescache': True, | ||||
b'revbranchcache': True, | ||||
Boris Feld
|
r37182 | }, | ||
Augie Fackler
|
r43347 | b'packed1': {b'cg.version': b's1'}, | ||
Augie Fackler
|
r43346 | } | ||
Augie Fackler
|
r43347 | _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2'] | ||
Augie Fackler
|
r43346 | |||
_bundlespecvariants = { | ||||
Augie Fackler
|
r43347 | b"streamv2": { | ||
b"changegroup": False, | ||||
b"streamv2": True, | ||||
b"tagsfnodescache": False, | ||||
b"revbranchcache": False, | ||||
Boris Feld
|
r37182 | } | ||
} | ||||
Boris Feld
|
r37185 | |||
Gregory Szorc
|
r31473 | # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE. | ||
Augie Fackler
|
r43347 | _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'} | ||
Gregory Szorc
|
r31473 | |||
Augie Fackler
|
r43346 | |||
Boris Feld
|
@attr.s
class bundlespec(object):
    """Parsed representation of a bundle specification string.

    Produced by ``parsebundlespec()``.  Field order matters: positional
    construction is used by callers.
    """

    # human-readable compression name (e.g. b'gzip')
    compression = attr.ib()
    # compression name as used on the wire (e.g. b'GZ')
    wirecompression = attr.ib()
    # human-readable bundle version (e.g. b'v2')
    version = attr.ib()
    # changegroup version as used on the wire (e.g. b'02')
    wireversion = attr.ib()
    # extra ``key=value`` parameters from the spec string
    params = attr.ib()
    # content options dict selecting which parts go in the bundle
    contentopts = attr.ib()
Boris Feld
|
r37181 | |||
Augie Fackler
|
r43346 | |||
Joerg Sonnenberger
|
def parsebundlespec(repo, spec, strict=True):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression isn't
    defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """

    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into (version, {params}); values are
        # URI encoded so they can carry arbitrary bytes.
        if b';' not in s:
            return s, {}
        params = {}
        version, paramstr = s.split(b';', 1)
        for p in paramstr.split(b';'):
            if b'=' not in p:
                raise error.InvalidBundleSpecification(
                    _(
                        b'invalid bundle specification: '
                        b'missing "=" in parameter: %s'
                    )
                    % p
                )

            key, value = p.split(b'=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value
        return version, params

    if strict and b'-' not in spec:
        raise error.InvalidBundleSpecification(
            _(
                b'invalid bundle specification; '
                b'must be prefixed with compression: %s'
            )
            % spec
        )

    if b'-' in spec:
        compression, version = spec.split(b'-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _(b'%s compression is not supported') % compression
            )

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _(b'%s is not a recognized bundle version') % version
            )
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = b'v1'
            # Generaldelta repos require v2.
            if b'generaldelta' in repo.requirements:
                version = b'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = b'v2'
        elif spec in _bundlespeccgversions:
            if spec == b'packed1':
                compression = b'none'
            else:
                compression = b'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _(b'%s is not a recognized bundle specification') % spec
            )

    # Bundle version 1 only supports a known set of compression engines.
    if version == b'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _(b'compression engine %s is not supported on v1 bundles')
            % compression
        )

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == b'packed1' and b'requirements' in params:
        requirements = set(params[b'requirements'].split(b','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _(b'missing support for repository features: %s')
                % b', '.join(sorted(missingreqs))
            )

    # Compute contentopts based on the version
    contentopts = _bundlespeccontentopts.get(version, {}).copy()

    # Process the variants
    if b"stream" in params and params[b"stream"] == b"v2":
        variant = _bundlespecvariants[b"streamv2"]
        contentopts.update(variant)

    # Translate the human-readable names into their wire-protocol forms.
    engine = util.compengines.forbundlename(compression)
    compression, wirecompression = engine.bundletype()
    wireversion = _bundlespeccgversions[version]

    return bundlespec(
        compression, wirecompression, version, wireversion, params, contentopts
    )
Gregory Szorc
|
r26639 | |||
Pierre-Yves David
|
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle readable from ``fh``.

    ``fname`` is used for error messages; when empty, a headerless stream is
    assumed and fixed up.  ``vfs``, when given, resolves ``fname`` to a full
    path for reporting.  Dispatches on the 4-byte magic header to a
    changegroup v1 unpacker, a bundle2 unbundler, or a stream clone applier.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = b"stream"
        # A headerless changegroup starts with a NUL byte: splice the bytes
        # we already consumed back in and treat it as an uncompressed HG10.
        if not header.startswith(b'HG') and header.startswith(b'\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = b"HG10"
            alg = b'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != b'HG':
        raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
    if version == b'10':
        # Legacy bundle: the compression marker follows the magic.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith(b'2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == b'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(
            _(b'%s: unknown bundle version %s') % (fname, version)
        )
Pierre-Yves David
|
r21063 | |||
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Returns a bundlespec string such as ``gzip-v1`` or
    ``none-v2;stream=v2;...``.  Raises ``error.Abort`` when the bundle type,
    compression, or changegroup version cannot be mapped to a known spec.
    """

    def speccompression(alg):
        # Map a wire compression name to its human-readable bundle name,
        # or None when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == b'_truncatedBZ':
            alg = b'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
        return b'%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if b'Compression' in b.params:
            compalg = b.params[b'Compression']
            comp = speccompression(compalg)
            if not comp:
                # BUGFIX: previously this interpolated ``comp`` (always None
                # here), so the message never named the offending algorithm.
                raise error.Abort(
                    _(b'unknown compression algorithm: %s') % compalg
                )
        else:
            comp = b'none'

        version = None
        for part in b.iterparts():
            if part.type == b'changegroup':
                version = part.params[b'version']
                if version in (b'01', b'02'):
                    version = b'v2'
                else:
                    raise error.Abort(
                        _(
                            b'changegroup version %s does not have '
                            b'a known bundlespec'
                        )
                        % version,
                        hint=_(b'try upgrading your Mercurial client'),
                    )
            elif part.type == b'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                requirements = urlreq.unquote(part.params[b'requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return b'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(
                _(b'could not identify changegroup version in bundle')
            )

        return b'%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return b'none-packed1;%s' % formatted
    else:
        raise error.Abort(_(b'unknown bundle type: %s') % b)
Gregory Szorc
|
r27883 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        # No common nodes known: everything since the null revision.
        common = [nullid]
    else:
        # Drop nodes the local changelog does not actually have.
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
Augie Fackler
|
r43346 | |||
r40803 | def _checkpublish(pushop): | |||
repo = pushop.repo | ||||
ui = repo.ui | ||||
Augie Fackler
|
r43347 | behavior = ui.config(b'experimental', b'auto-publish') | ||
if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'): | ||||
r40803 | return | |||
Augie Fackler
|
r43347 | remotephases = listkeys(pushop.remote, b'phases') | ||
if not remotephases.get(b'publishing', False): | ||||
r40803 | return | |||
if pushop.revs is None: | ||||
Augie Fackler
|
r43347 | published = repo.filtered(b'served').revs(b'not public()') | ||
r40803 | else: | |||
Augie Fackler
|
r43347 | published = repo.revs(b'::%ln - public()', pushop.revs) | ||
r40803 | if published: | |||
Augie Fackler
|
r43347 | if behavior == b'warn': | ||
ui.warn( | ||||
_(b'%i changesets about to be published\n') % len(published) | ||||
) | ||||
elif behavior == b'confirm': | ||||
Augie Fackler
|
r43346 | if ui.promptchoice( | ||
Martin von Zweigbergk
|
r43387 | _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No') | ||
Augie Fackler
|
r43346 | % len(published) | ||
): | ||||
Augie Fackler
|
r43347 | raise error.Abort(_(b'user quit')) | ||
elif behavior == b'abort': | ||||
msg = _(b'push would publish %i changesets') % len(published) | ||||
Augie Fackler
|
r43346 | hint = _( | ||
Augie Fackler
|
r43347 | b"use --publish or adjust 'experimental.auto-publish'" | ||
b" config" | ||||
Augie Fackler
|
r43346 | ) | ||
r40803 | raise error.Abort(msg, hint=hint) | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
r29682 | def _forcebundle1(op): | ||
"""return true if a pull/push must use bundle1 | ||||
Pierre-Yves David
|
r24650 | |||
Pierre-Yves David
|
r29683 | This function is used to allow testing of the older bundle version""" | ||
ui = op.repo.ui | ||||
Mads Kiilerich
|
r30332 | # The goal is this config is to allow developer to choose the bundle | ||
Pierre-Yves David
|
r29683 | # version used during exchanged. This is especially handy during test. | ||
# Value is a list of bundle version to be picked from, highest version | ||||
# should be used. | ||||
# | ||||
# developer config: devel.legacy.exchange | ||||
Augie Fackler
|
r43347 | exchange = ui.configlist(b'devel', b'legacy.exchange') | ||
forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange | ||||
return forcebundle1 or not op.remote.capable(b'bundle2') | ||||
Pierre-Yves David
|
r24650 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        force=False,
        revs=None,
        newbranch=False,
        bookmarks=(),
        publish=False,
        pushvars=None,
    ):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars
        # publish pushed changesets
        self.publish = publish

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        rev = self.repo.changelog.index.rev
        cheads = [node for node in self.revs if rev(node) in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set(
            b'%ln and parents(roots(%ln))',
            self.outgoing.commonheads,
            self.outgoing.missing,
        )
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
Pierre-Yves David
|
r22015 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
# mapping of message used when pushing bookmark
# key -> (success message, failure message); each takes the bookmark name.
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed!\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed!\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed!\n'),
    ),
}
def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    if pushop.remote.local():
        # Local-to-local push: the destination must support every feature
        # the source repository requires.
        missing = (
            set(pushop.repo.requirements) - pushop.remote.local().supported
        )
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))
    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
            lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    # Run the push pipeline under whatever locks/transaction we managed to
    # take (nullcontextmanager stands in for the ones we could not).
    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
Pierre-Yves David
|
r20352 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
# list of steps to perform discovery before push
pushdiscoveryorder = []
# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""

    def dec(func):
        # A step name may only be registered once; extensions must wrap the
        # mapping entry instead of re-registering.
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func

    return dec
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
r20466 | def _pushdiscovery(pushop): | ||
Pierre-Yves David
|
r22018 | """Run all discovery steps""" | ||
for stepname in pushdiscoveryorder: | ||||
step = pushdiscoverymapping[stepname] | ||||
step(pushop) | ||||
Augie Fackler
|
r43346 | |||
Matt Harbison
|
def _checksubrepostate(pushop):
    """Ensure all outgoing referenced subrepo revisions are present locally"""
    repo = pushop.repo
    for node in pushop.outgoing.missing:
        ctx = repo[node]
        # only changesets that carry a subrepo state are interesting
        if b'.hgsub' not in ctx.manifest():
            continue
        if b'.hgsubstate' not in ctx.files():
            continue
        for subpath in sorted(ctx.substate):
            ctx.sub(subpath).verify(onpush=True)
Augie Fackler
|
@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    fci = discovery.findcommonincoming
    if pushop.revs:
        # limit discovery to ancestors of the explicitly requested revs
        commoninc = fci(
            pushop.repo,
            pushop.remote,
            force=pushop.force,
            ancestorsof=pushop.revs,
        )
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(
        pushop.repo,
        pushop.remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,  # reuse the incoming-discovery result
        force=pushop.force,
    )
    # stash the results on the push operation for the later push steps
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and not pushop.outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    extracond = b''
    if not pushop.remotephases.publishing:
        # on a non-publishing remote only already-public csets matter
        extracond = b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        # `--publish` push: everything pushed becomes public
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(
            unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        )
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # `future` assumes the push succeeds, `fallback` that it fails
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """compute the obsolescence markers relevant to the push"""
    # feature must be enabled for exchange
    if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
        return
    # nothing local to send
    if not pushop.repo.obsstore:
        return
    # remote must advertise the 'obsolete' pushkey namespace
    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
        return
    repo = pushop.repo
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
Pierre-Yves David
|
r22035 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and decide which to push

    The actual decision is delegated to ``_processcompared``."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug(b"checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to ancestors of the pushed revs
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))

    # bookmarks explicitly requested on the command line (names expanded)
    explicit = {
        repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
    }

    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.

    ``pushed``: revnum ancestors of the pushed revs (or () when pushing all);
    ``explicit``: set of bookmark names explicitly requested on the CLI
    (mutated here: every matched name is removed);
    ``comp``: the 8-tuple returned by ``bookmod.comparebookmarks``.
    Results are appended to ``pushop.outbookmarks`` as (name, old, new).
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        # fast-forward moves are pushed even when not explicitly listed,
        # as long as the target changeset is part of the push
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, b'', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, b''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        # names left over were never matched on either side
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
Pierre-Yves David
|
r22239 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets and report whether to push

    Returns False when there is nothing to push; aborts when the push
    would publish obsolete or unstable changesets (unless --force);
    otherwise runs head checking and returns True."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _(b"push includes obsolete changeset: %s!")
            mspd = _(b"push includes phase-divergent changeset: %s!")
            mscd = _(b"push includes content-divergent changeset: %s!")
            mst = {
                b"orphan": _(b"push includes orphan changeset: %s!"),
                b"phase-divergent": mspd,
                b"content-divergent": mscd,
            }
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []
# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}


def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2-part generating step

    The decorated function is stored under ``stepname`` in
    ``b2partsgenmapping``; the name is appended to ``b2partsgenorder``,
    or inserted at position ``idx`` when one is given, so decoration
    order matters.

    You can only use this decorator for new steps; to wrap a step from
    an extension, attack the b2partsgenmapping dictionary directly."""

    def register(func):
        # each step name may be registered exactly once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func

    return register
Augie Fackler
|
r43346 | |||
Ryan McElroy
|
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = b'related' in bundler.capabilities.get(
            b'checkheads', ()
        )
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: server verifies its heads are exactly these
            bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
        else:
            # finer-grained check: only heads we are about to affect
            affected = set()
            for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart(b'check:updated-heads', data=data)
Ryan McElroy
|
r26428 | |||
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _pushing(pushop):
    """return True if we are pushing anything"""
    payloads = (
        pushop.outgoing.missing,
        pushop.outdatedphases,
        pushop.outobsmarkers,
        pushop.outbookmarks,
    )
    return any(payloads)
Boris Feld
|
r34822 | |||
Augie Fackler
|
@b2partsgenerator(b'check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    # nothing to guard when not pushing, or when the user forces the push
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = b'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    # send (name, expected-old-value) pairs so the server can detect races
    data = []
    for book, old, new in pushop.outbookmarks:
        data.append((book, old))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart(b'check:bookmarks', data=checkdata)
@b2partsgenerator(b'check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    # nothing to guard when not pushing, or when the user forces the push
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            for nodes in checks:
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart(b'check:phases', data=checkdata)
@b2partsgenerator(b'changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # negotiate the highest changegroup version both sides support
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(pushop.repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(
        pushop.repo, pushop.outgoing, version, b'push'
    )
    cgpart = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam(b'version', version)
    if b'treemanifest' in pushop.repo.requirements:
        cgpart.addparam(b'treemanifest', b'1')
    if b'exp-sidedata-flag' in pushop.repo.requirements:
        cgpart.addparam(b'exp-sidedata', b'1')

    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies[b'changegroup']) == 1
        pushop.cgresult = cgreplies[b'changegroup'][0][b'return']

    return handlereply
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@b2partsgenerator(b'phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Dispatches to the binary 'phase-heads' part when supported (and not
    disabled via devel.legacy.exchange), falling back to pushkey parts."""
    if b'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
    haspushkey = b'pushkey' in b2caps
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())

    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    elif haspushkey:
        return _pushb2phasespushkey(pushop, bundler)
Boris Feld
|
r34823 | |||
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add(b'phases')
    if pushop.outdatedphases:
        # one bucket per phase; index 0 (public) gets the heads to publish
        updates = [[] for p in phases.allphases]
        updates[0].extend(h.node() for h in pushop.outdatedphases)
        phasedata = phases.binaryencode(updates)
        bundler.newpart(b'phase-heads', data=phasedata)
Boris Feld
|
r34837 | |||
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add(b'phases')
    # (part id, node) pairs, so replies/failures can be mapped back
    part2node = []

    def handlefailure(pushop, exc):
        # translate a pushkey part failure into a user-facing abort
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_(b'updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head moving from draft to public
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'phases'))
        part.addparam(b'key', enc(newremotehead.hex()))
        part.addparam(b'old', enc(b'%d' % phases.draft))
        part.addparam(b'new', enc(b'%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # warn about any phase update the server refused or ignored
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _(b'server ignored update of %s to public!\n') % node
            elif not int(results[0][b'return']):
                msg = _(b'updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)

    return handlereply
Pierre-Yves David
|
r21904 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@b2partsgenerator(b'obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part carrying the discovered markers, if any"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format shared with the remote; leave the step not-done
        return
    pushop.stepsdone.add(b'obsmarkers')
    if pushop.outobsmarkers:
        markers = obsutil.sortedmarkers(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)
Pierre-Yves David
|
r22347 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@b2partsgenerator(b'bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Uses the binary 'bookmarks' part when the remote supports it (and it
    is not disabled via devel.legacy.exchange), else pushkey parts."""
    if b'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
    legacybooks = b'bookmarks' in legacy

    if not legacybooks and b'bookmarks' in b2caps:
        return _pushb2bookmarkspart(pushop, bundler)
    elif b'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _bmaction(old, new):
    """small utility for bookmark pushing

    Classify a bookmark move as b'export' (no previous value),
    b'delete' (no new value) or b'update' (both present)."""
    if old:
        return b'update' if new else b'delete'
    return b'export'
Boris Feld
|
r35265 | |||
Augie Fackler
|
r43346 | |||
Navaneeth Suresh
|
def _abortonsecretctx(pushop, node, b):
    """abort if a given bookmark points to a secret changeset"""
    # node may be falsy (e.g. bookmark deletion); nothing to check then
    if node and pushop.repo[node].phase() == phases.secret:
        raise error.Abort(
            _(b'cannot push bookmark %s as it points to a secret changeset') % b
        )
Navaneeth Suresh
|
r43082 | |||
Boris Feld
|
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmark updates through a single binary 'bookmarks' part"""
    pushop.stepsdone.add(b'bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    data = []
    for book, old, new in pushop.outbookmarks:
        _abortonsecretctx(pushop, new, book)
        data.append((book, new))
        allactions.append((book, _bmaction(old, new)))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart(b'bookmarks', data=checkdata)

    def handlereply(op):
        ui = pushop.ui
        # if success
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
Augie Fackler
|
r43346 | |||
Boris Feld
|
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmark updates through legacy pushkey parts (one per move)"""
    pushop.stepsdone.add(b'bookmarks')
    # (part id, bookmark, action) triples for reply/failure handling
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # map a failed pushkey part back to its bookmark and abort
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        _abortonsecretctx(pushop, new, book)
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'bookmarks'))
        part.addparam(b'key', enc(book))
        part.addparam(b'old', enc(hex(old)))
        part.addparam(b'new', enc(hex(new)))
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0][b'return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1

    return handlereply
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@b2partsgenerator(b'pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if pushvars:
        shellvars = {}
        for raw in pushvars:
            # each entry must look like KEY=VALUE (VALUE may be empty)
            if b'=' not in raw:
                msg = (
                    b"unable to parse variable '%s', should follow "
                    b"'KEY=VALUE' or 'KEY=' format"
                )
                raise error.Abort(msg % raw)
            # split on the first '=' only, so values may contain '='
            k, v = raw.split(b'=', 1)
            shellvars[k] = v

        part = bundler.newpart(b'pushvars')

        for key, value in pycompat.iteritems(shellvars):
            part.addparam(key, value, mandatory=False)
Pierre-Yves David
|
r22242 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = pushop.trmanager and pushop.ui.configbool(
        b'experimental', b'bundle2.pushback'
    )

    # create reply capability
    capsblob = bundle2.encodecaps(
        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
    )
    bundler.newpart(b'replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part-generating step, in order; a step may
    # return a callable to be invoked on the server's reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand(
                    b'unbundle',
                    {
                        b'bundle': stream,
                        b'heads': [b'force'],
                        b'url': pushop.remote.url(),
                    },
                ).result()
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_(b'remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
            raise error.Abort(_(b'push failed on remote'))
    except error.PushkeyFailed as exc:
        # delegate to the per-part failure callback when one is registered
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
Pierre-Yves David
|
r21061 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable(b'unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (
        outgoing.excluded or pushop.repo.changelog.filteredrevs
    ):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(
            pushop.repo,
            outgoing,
            b'01',
            b'push',
            fastpath=True,
            bundlecaps=bundlecaps,
        )
    else:
        cg = changegroup.makechangegroup(
            pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
        )

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = [b'force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
Pierre-Yves David
|
r20463 | |||
Pierre-Yves David
|
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Phase data is exchanged even when no changesets were pushed.  When the
    remote does not advertise phases at all, the common heads are simply
    moved to public locally.
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = listkeys(pushop.remote, b'phases')
    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and pushop.cgresult is None  # nothing was pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {b'publishing': b'True'}
    if not remotephases:  # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get(b'publishing', False):
            # publishing remote: everything common is public locally too
            _localphasemove(pushop, cheads)
        else:  # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote
        if pushop.cgresult:
            if b'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add(b'phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': b'phases',
                        b'key': newremotehead.hex(),
                        b'old': b'%d' % phases.draft,
                        b'new': b'%d' % phases.public,
                    },
                ).result()

            # pushkey returns a falsy value when the remote refused the move
            if not r:
                pushop.ui.warn(
                    _(b'updating %s to public failed!\n') % newremotehead
                )
Pierre-Yves David
|
r20441 | |||
Pierre-Yves David
|
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.trmanager:
        # No transaction manager means the repo is not locked, so we must
        # not touch any phases.  Tell the user when an actual move was
        # skipped (some node sits in a laxer phase than the target).
        skipped = [n for n in nodes if phase < pushop.repo[n].phase()]
        name = phases.phasenames[phase]
        if skipped:
            pushop.ui.status(
                _(
                    b'cannot lock source repo, skipping '
                    b'local %s phase update\n'
                )
                % name
            )
        return
    phases.advanceboundary(
        pushop.repo, pushop.trmanager.transaction(), phase, nodes
    )
Pierre-Yves David
|
r20438 | |||
Pierre-Yves David
|
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Markers from ``pushop.outobsmarkers`` are serialized via the legacy
    pushkey escape mechanism and pushed one ``obsolete`` pushkey at a time.
    """
    if b'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add(b'obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug(b'try to push obsolete markers to remote\n')
        rslts = []
        markers = obsutil.sortedmarkers(pushop.outobsmarkers)
        # split markers into pushkey-sized chunks keyed 'dump0', 'dump1', ...
        remotedata = obsolete._pushkeyescape(markers)
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey(b'obsolete', key, b'', data))
        # any falsy pushkey result means at least one chunk was rejected
        if [r for r in rslts if not r]:
            msg = _(b'failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Each entry of ``pushop.outbookmarks`` is a ``(name, old, new)`` triple;
    an empty ``old`` means the bookmark is new on the remote ('export'), an
    empty ``new`` means it is being removed ('delete').
    """
    if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'

        with remote.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': b'bookmarks',
                    b'key': b,
                    b'old': hex(old),
                    b'new': hex(new),
                },
            ).result()

        if r:
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
Pierre-Yves David
|
r20469 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        heads=None,
        force=False,
        bookmarks=(),
        remotebookmarks=None,
        streamclonerequested=None,
        includepats=None,
        excludepats=None,
        depth=None,
    ):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [
            repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
        ]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set later by pull())
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False
        # Set of file patterns to include.
        self.includepats = includepats
        # Set of file patterns to exclude.
        self.excludepats = excludepats
        # Number of ancestor changesets to pull from each pulled head.
        self.depth = depth

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless bundle1 is being forced (config or server capability)
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
Augie Fackler
|
r43346 | |||
Martin von Zweigbergk
|
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        # operation name used in the transaction name and hook args
        self.source = source
        self.url = url
        # lazily-created transaction; None until transaction() is called
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            # password is stripped from the URL before it lands in logs/hooks
            trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs[b'source'] = self.source
            self._tr.hookargs[b'url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
Pierre-Yves David
|
r20469 | |||
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def listkeys(remote, namespace):
    """Return the pushkey listing for ``namespace`` from peer ``remote``."""
    executor = remote.commandexecutor()
    with executor as conn:
        request = conn.callcommand(b'listkeys', {b'namespace': namespace})
        return request.result()
Gregory Szorc
|
r37775 | |||
Augie Fackler
|
r43346 | |||
Joerg Sonnenberger
|
def _fullpullbundle2(repo, pullop):
    """Repeatedly run _pullbundle2 until the pull is complete."""
    # The server may send a partial reply, i.e. when inlining
    # pre-computed bundles. In that case, update the common
    # set based on the results and pull another bundle.
    #
    # There are two indicators that the process is finished:
    # - no changeset has been added, or
    # - all remote heads are known locally.
    # The head check must use the unfiltered view as obsoletion
    # markers can hide heads.
    unfi = repo.unfiltered()
    unficl = unfi.changelog

    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
        return set(ctx.node() for ctx in res)

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
        return set(ctx.node() for ctx in res)

    while True:
        old_heads = unficl.heads()
        # changelog length before the pull lets us detect "nothing added"
        clstart = len(unficl)
        _pullbundle2(pullop)
        if repository.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            break
        # grow the common set with what we just received and retry
        new_heads = headsofdiff(unficl.heads(), old_heads)
        pullop.common = headsofunion(new_heads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
Augie Fackler
|
r43346 | |||
def pull( | ||||
repo, | ||||
remote, | ||||
heads=None, | ||||
force=False, | ||||
bookmarks=(), | ||||
opargs=None, | ||||
streamclonerequested=None, | ||||
includepats=None, | ||||
excludepats=None, | ||||
depth=None, | ||||
): | ||||
Gregory Szorc
|
r26440 | """Fetch repository data from a remote. | ||
This is the main function used to retrieve data from a remote repository. | ||||
``repo`` is the local repository to clone into. | ||||
``remote`` is a peer instance. | ||||
``heads`` is an iterable of revisions we want to pull. ``None`` (the | ||||
default) means to pull everything from the remote. | ||||
``bookmarks`` is an iterable of bookmarks requesting to be pulled. By | ||||
default, all remote bookmarks are pulled. | ||||
``opargs`` are additional keyword arguments to pass to ``pulloperation`` | ||||
initialization. | ||||
Gregory Szorc
|
r26448 | ``streamclonerequested`` is a boolean indicating whether a "streaming | ||
clone" is requested. A "streaming clone" is essentially a raw file copy | ||||
of revlogs from the server. This only works when the local repository is | ||||
empty. The default value of ``None`` means to respect the server | ||||
configuration for preferring stream clones. | ||||
Gregory Szorc
|
r39589 | ``includepats`` and ``excludepats`` define explicit file patterns to | ||
include and exclude in storage, respectively. If not defined, narrow | ||||
patterns from the repo instance are used, if available. | ||||
Gregory Szorc
|
r40367 | ``depth`` is an integer indicating the DAG depth of history we're | ||
interested in. If defined, for each revision specified in ``heads``, we | ||||
will fetch up to this many of its ancestors and data associated with them. | ||||
Gregory Szorc
|
r26440 | |||
Returns the ``pulloperation`` created for this pull. | ||||
""" | ||||
Pierre-Yves David
|
r25445 | if opargs is None: | ||
opargs = {} | ||||
Gregory Szorc
|
r39589 | |||
# We allow the narrow patterns to be passed in explicitly to provide more | ||||
# flexibility for API consumers. | ||||
if includepats or excludepats: | ||||
includepats = includepats or set() | ||||
excludepats = excludepats or set() | ||||
else: | ||||
includepats, excludepats = repo.narrowpats | ||||
narrowspec.validatepatterns(includepats) | ||||
narrowspec.validatepatterns(excludepats) | ||||
Augie Fackler
|
r43346 | pullop = pulloperation( | ||
repo, | ||||
remote, | ||||
heads, | ||||
force, | ||||
bookmarks=bookmarks, | ||||
streamclonerequested=streamclonerequested, | ||||
includepats=includepats, | ||||
excludepats=excludepats, | ||||
depth=depth, | ||||
**pycompat.strkwargs(opargs) | ||||
) | ||||
Gregory Szorc
|
r33668 | |||
peerlocal = pullop.remote.local() | ||||
if peerlocal: | ||||
missing = set(peerlocal.requirements) - pullop.repo.supported | ||||
Pierre-Yves David
|
r20469 | if missing: | ||
Augie Fackler
|
r43346 | msg = _( | ||
Augie Fackler
|
r43347 | b"required features are not" | ||
b" supported in the destination:" | ||||
b" %s" | ||||
) % (b', '.join(sorted(missing))) | ||||
Pierre-Yves David
|
r26587 | raise error.Abort(msg) | ||
Pierre-Yves David
|
r20469 | |||
Augie Fackler
|
r43347 | pullop.trmanager = transactionmanager(repo, b'pull', remote.url()) | ||
Martin von Zweigbergk
|
r42513 | wlock = util.nullcontextmanager() | ||
if not bookmod.bookmarksinstore(repo): | ||||
wlock = repo.wlock() | ||||
with wlock, repo.lock(), pullop.trmanager: | ||||
Gregory Szorc
|
r39665 | # Use the modern wire protocol, if available. | ||
Augie Fackler
|
r43347 | if remote.capable(b'command-changesetdata'): | ||
Gregory Szorc
|
r39665 | exchangev2.pull(pullop) | ||
else: | ||||
# This should ideally be in _pullbundle2(). However, it needs to run | ||||
# before discovery to avoid extra work. | ||||
_maybeapplyclonebundle(pullop) | ||||
streamclone.maybeperformlegacystreamclone(pullop) | ||||
_pulldiscovery(pullop) | ||||
if pullop.canusebundle2: | ||||
_fullpullbundle2(repo, pullop) | ||||
_pullchangeset(pullop) | ||||
_pullphase(pullop) | ||||
_pullbookmarks(pullop) | ||||
_pullobsolete(pullop) | ||||
Pierre-Yves David
|
r20469 | |||
Pulkit Goyal
|
r35236 | # storing remotenames | ||
Augie Fackler
|
r43347 | if repo.ui.configbool(b'experimental', b'remotenames'): | ||
Pulkit Goyal
|
r35348 | logexchange.pullremotenames(repo, remote) | ||
Pulkit Goyal
|
r35236 | |||
Pierre-Yves David
|
r22693 | return pullop | ||
Pierre-Yves David
|
r20476 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
# list of steps to perform discovery before pull
# (populated by the ``pulldiscovery`` decorator below)
pulldiscoveryorder = []
# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""

    def register(func):
        # a step may only be declared once; extensions wrap existing steps
        # through ``pulldiscoverymapping`` instead of re-registering
        assert stepname not in pulldiscoverymapping
        pulldiscoveryorder.append(stepname)
        pulldiscoverymapping[stepname] = func
        return func

    return register
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    # steps run in registration order; this ordering may matter
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@pulldiscovery(b'b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    # bookmarks already known (e.g. passed in by the caller): nothing to do
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    books = listkeys(pullop.remote, b'bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
Pierre-Yves David
|
r25369 | |||
Augie Fackler
|
@pulldiscovery(b'changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point."""
    tmp = discovery.findcommonincoming(
        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
    )
    common, fetch, rheads = tmp
    has_node = pullop.repo.unfiltered().changelog.index.has_node
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        for n in rheads:
            if has_node(n):
                if n not in scommon:
                    common.append(n)
        # every remote head is already known: nothing left to fetch
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
Pierre-Yves David
|
r20900 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    The function builds the ``getbundle`` argument dict step by step, marking
    each covered step in ``pullop.stepsdone`` so the legacy fallback helpers
    become no-ops afterwards.
    """
    kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs[b'common'] = pullop.common
    kwargs[b'heads'] = pullop.heads or pullop.rheads

    # check server supports narrow and then adding includepats and excludepats
    servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
    if servernarrow and pullop.includepats:
        kwargs[b'includepats'] = pullop.includepats
    if servernarrow and pullop.excludepats:
        kwargs[b'excludepats'] = pullop.excludepats

    if streaming:
        # stream clone replaces both the changegroup and phase steps
        kwargs[b'cg'] = False
        kwargs[b'stream'] = True
        pullop.stepsdone.add(b'changegroup')
        pullop.stepsdone.add(b'phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add(b'changegroup')

        kwargs[b'cg'] = pullop.fetch

        # prefer the binary phase-heads part unless disabled via
        # devel.legacy.exchange or not advertised by the server
        legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
        hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
        if not legacyphase and hasbinaryphase:
            kwargs[b'phases'] = True
            pullop.stepsdone.add(b'phases')

        if b'listkeys' in pullop.remotebundle2caps:
            if b'phases' not in pullop.stepsdone:
                # fall back to the pushkey-based phase exchange
                kwargs[b'listkeys'] = [b'phases']

    bookmarksrequested = False
    legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
    hasbinarybook = b'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add(b'request-bookmarks')

    if (
        b'request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark
        and hasbinarybook
    ):
        # request the dedicated binary bookmarks part
        kwargs[b'bookmarks'] = True
        bookmarksrequested = True

    if b'listkeys' in pullop.remotebundle2caps:
        if b'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add(b'request-bookmarks')
            kwargs.setdefault(b'listkeys', []).append(b'bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (
        pullop.remote.capable(b'clonebundles')
        and pullop.heads is None
        and list(pullop.common) == [nullid]
    ):
        kwargs[b'cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_(b'streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_(b"requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs[b'obsmarkers'] = True
            pullop.stepsdone.add(b'obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)

    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args[b'source'] = b'pull'
        bundle = e.callcommand(b'getbundle', args).result()

        try:
            op = bundle2.bundleoperation(
                pullop.repo, pullop.gettransaction, source=b'pull'
            )
            op.modes[b'bookmarks'] = b'records'
            bundle2.processbundle(pullop.repo, bundle, op=op)
        except bundle2.AbortFromPart as exc:
            pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
            raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records[b'listkeys']:
        if namespace == b'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        # bookmarks arrived in the dedicated binary part
        books = {}
        for record in op.records[b'bookmarks']:
            books[record[b'bookmark']] = record[b"node"]
        pullop.remotebookmarks = books
    else:
        # bookmarks arrived through the listkeys reply, if at all
        for namespace, value in op.records[b'listkeys']:
            if namespace == b'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
Pierre-Yves David
|
r22656 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    The default implementation is a deliberate no-op; extensions wrap this
    to mutate ``kwargs`` before the ``getbundle`` request is issued.
    """
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Legacy (non-bundle2) changegroup fetch.  Depending on the remote's
    capabilities this uses ``getbundle``, ``changegroup`` or
    ``changegroupsubset``; the resulting bundle is applied locally and the
    outcome stored in ``pullop.cgresult``.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
Pierre-Yves David
|
r20489 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullphase(pullop):
    """fetch phase information from the remote via pushkey and apply it"""
    # Skip entirely when an earlier step (e.g. bundle2) already handled
    # phases for this pull operation.
    if b'phases' not in pullop.stepsdone:
        remotephases = listkeys(pullop.remote, b'phases')
        _pullapplyphases(pullop, remotephases)
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if b'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'phases')
    publishing = bool(remotephases.get(b'publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases
        )
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.index.get_rev
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        # transaction is opened lazily, only when there is work to do
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
Pierre-Yves David
|
r20486 | |||
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    # run at most once per pull operation
    if b'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'bookmarks')
    localrepo = pullop.repo
    bookmod.updatefromremote(
        localrepo.ui,
        localrepo,
        pullop.remotebookmarks,
        pullop.remote.url(),
        pullop.gettransaction,
        explicit=pullop.explicitbookmarks,
    )
Pierre-Yves David
|
r22654 | |||
Pierre-Yves David
|
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if b'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
        remoteobs = listkeys(pullop.remote, b'obsolete')
        # 'dump0' is the first chunk of the pushkey-encoded marker data;
        # its presence means the remote actually has markers to send.
        if b'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # sorted(reverse=True) yields dump0, dump1, ... in a stable order
            for key in sorted(remoteobs, reverse=True):
                if key.startswith(b'dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    This massages the named arguments for getbundle wire protocol commands
    so requested data is filtered through access control rules.

    Returns a new kwargs dict; the input dict is not mutated.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
    # Per-user include/exclude patterns, falling back to the 'default.*' keys.
    user_includes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.includes',
        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
    )
    user_excludes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.excludes',
        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
    )
    if not user_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    # '*' grants everything; other entries become rooted 'path:' patterns.
    user_includes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_includes
    ]
    user_excludes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
    ]

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    # Intersect what the client requested with what the ACL allows.
    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    new_args = {}
    new_args.update(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def _computeellipsis(repo, common, heads, known, match, depth=None):
    """Compute the shape of a narrowed DAG.

    Args:
      repo: The repository we're transferring.
      common: The roots of the DAG range we're transferring.
              May be just [nullid], which means all ancestors of heads.
      heads: The heads of the DAG range we're transferring.
      match: The narrowmatcher that allows us to identify relevant changes.
      depth: If not None, only consider nodes to be full nodes if they are at
             most depth changesets away from one of heads.

    Returns:
      A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:

        visitnodes: The list of nodes (either full or ellipsis) which
                    need to be sent to the client.
        relevant_nodes: The set of changelog nodes which change a file inside
                        the narrowspec. The client needs these as non-ellipsis
                        nodes.
        ellipsisroots: A dict of {rev: parents} that is used in
                       narrowchangegroup to produce ellipsis nodes with the
                       correct parents.
    """
    cl = repo.changelog
    mfl = repo.manifestlog

    clrev = cl.rev

    commonrevs = {clrev(n) for n in common} | {nullrev}
    headsrevs = {clrev(n) for n in heads}

    if depth:
        # distance (in changesets) from the nearest head, computed lazily
        revdepth = {h: 0 for h in headsrevs}

    ellipsisheads = collections.defaultdict(set)
    ellipsisroots = collections.defaultdict(set)

    def addroot(head, curchange):
        """Add a root to an ellipsis head, splitting heads with 3 roots."""
        ellipsisroots[head].add(curchange)
        # Recursively split ellipsis heads with 3 roots by finding the
        # roots' youngest common descendant which is an elided merge commit.
        # That descendant takes 2 of the 3 roots as its own, and becomes a
        # root of the head.
        while len(ellipsisroots[head]) > 2:
            child, roots = splithead(head)
            splitroots(head, child, roots)
            head = child  # Recurse in case we just added a 3rd root

    def splitroots(head, child, roots):
        # move 'roots' from 'head' onto the intermediate 'child'
        ellipsisroots[head].difference_update(roots)
        ellipsisroots[head].add(child)
        ellipsisroots[child].update(roots)
        ellipsisroots[child].discard(child)

    def splithead(head):
        r1, r2, r3 = sorted(ellipsisroots[head])
        for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
            mid = repo.revs(
                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
            )
            for j in mid:
                if j == nr2:
                    return nr2, (nr1, nr2)
                if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                    return j, (nr1, nr2)
        raise error.Abort(
            _(
                b'Failed to split up ellipsis node! head: %d, '
                b'roots: %d %d %d'
            )
            % (head, r1, r2, r3)
        )

    missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
    visit = reversed(missing)
    relevant_nodes = set()
    visitnodes = [cl.node(m) for m in missing]
    required = set(headsrevs) | known
    for rev in visit:
        # NOTE: 'clrev' is rebound here from cl.rev to the revision object
        clrev = cl.changelogrevision(rev)
        ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
        if depth is not None:
            curdepth = revdepth[rev]
            for p in ps:
                revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
        needed = False
        shallow_enough = depth is None or revdepth[rev] <= depth
        if shallow_enough:
            curmf = mfl[clrev.manifest].read()
            if ps:
                # We choose to not trust the changed files list in
                # changesets because it's not always correct. TODO: could
                # we trust it for the non-merge case?
                p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
                needed = bool(curmf.diff(p1mf, match))
                if not needed and len(ps) > 1:
                    # For merge changes, the list of changed files is not
                    # helpful, since we need to emit the merge if a file
                    # in the narrow spec has changed on either side of the
                    # merge. As a result, we do a manifest diff to check.
                    p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
                    needed = bool(curmf.diff(p2mf, match))
            else:
                # For a root node, we need to include the node if any
                # files in the node match the narrowspec.
                needed = any(curmf.walk(match))

        if needed:
            for head in ellipsisheads[rev]:
                addroot(head, rev)
            for p in ps:
                required.add(p)
            relevant_nodes.add(cl.node(rev))
        else:
            if not ps:
                ps = [nullrev]
            if rev in required:
                for head in ellipsisheads[rev]:
                    addroot(head, rev)
                for p in ps:
                    ellipsisheads[p].add(rev)
            else:
                for p in ps:
                    ellipsisheads[p] |= ellipsisheads[rev]

    # add common changesets as roots of their reachable ellipsis heads
    for c in commonrevs:
        for head in ellipsisheads[c]:
            addroot(head, c)
    return visitnodes, relevant_nodes, ellipsisroots
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # Advertise bundle2 support plus the repository's capability blob,
    # URL-quoted so it can travel as a plain getbundle argument.
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {b'HG20', b'bundle2=' + urlreq.quote(capsblob)}
Augie Fackler
|
r43346 | |||
Mike Hommey
|
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []
# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
# (see getbundle2partsgenerator below for how entries are registered).
getbundle2partsmapping = {}
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""

    def register(func):
        # a step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func

    return register
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def bundle2requested(bundlecaps):
    """Tell whether the advertised capabilities request a bundle2 stream.

    ``bundlecaps`` may be None (legacy client) in which case bundle2 is
    not requested.
    """
    if bundlecaps is None:
        return False
    return any(cap.startswith(b'HG2') for cap in bundlecaps)
Augie Fackler
|
r43346 | |||
def getbundlechunks( | ||||
repo, source, heads=None, common=None, bundlecaps=None, **kwargs | ||||
): | ||||
Gregory Szorc
|
r30187 | """Return chunks constituting a bundle's raw data. | ||
Pierre-Yves David
|
r20954 | |||
Pierre-Yves David
|
r24686 | Could be a bundle HG10 or a bundle HG20 depending on bundlecaps | ||
Gregory Szorc
|
r30187 | passed. | ||
Pierre-Yves David
|
r20954 | |||
Gregory Szorc
|
r35803 | Returns a 2-tuple of a dict with metadata about the generated bundle | ||
and an iterator over raw chunks (of varying sizes). | ||||
Pierre-Yves David
|
r20954 | """ | ||
Pulkit Goyal
|
r33016 | kwargs = pycompat.byteskwargs(kwargs) | ||
Gregory Szorc
|
r35803 | info = {} | ||
Gregory Szorc
|
r27244 | usebundle2 = bundle2requested(bundlecaps) | ||
Mike Hommey
|
r22542 | # bundle10 case | ||
Pierre-Yves David
|
r24649 | if not usebundle2: | ||
Augie Fackler
|
r43347 | if bundlecaps and not kwargs.get(b'cg', True): | ||
raise ValueError( | ||||
_(b'request for bundle10 must include changegroup') | ||||
) | ||||
Mike Hommey
|
r22542 | |||
Pierre-Yves David
|
r21656 | if kwargs: | ||
Augie Fackler
|
r43346 | raise ValueError( | ||
Augie Fackler
|
r43347 | _(b'unsupported getbundle arguments: %s') | ||
% b', '.join(sorted(kwargs.keys())) | ||||
Augie Fackler
|
r43346 | ) | ||
Pierre-Yves David
|
r29808 | outgoing = _computeoutgoing(repo, heads, common) | ||
Augie Fackler
|
r43347 | info[b'bundleversion'] = 1 | ||
Augie Fackler
|
r43346 | return ( | ||
info, | ||||
changegroup.makestream( | ||||
Augie Fackler
|
r43347 | repo, outgoing, b'01', source, bundlecaps=bundlecaps | ||
Augie Fackler
|
r43346 | ), | ||
) | ||||
Mike Hommey
|
r22542 | |||
# bundle20 case | ||||
Augie Fackler
|
r43347 | info[b'bundleversion'] = 2 | ||
Pierre-Yves David
|
r21143 | b2caps = {} | ||
for bcaps in bundlecaps: | ||||
Augie Fackler
|
r43347 | if bcaps.startswith(b'bundle2='): | ||
blob = urlreq.unquote(bcaps[len(b'bundle2=') :]) | ||||
Pierre-Yves David
|
r21143 | b2caps.update(bundle2.decodecaps(blob)) | ||
bundler = bundle2.bundle20(repo.ui, b2caps) | ||||
Mike Hommey
|
r22542 | |||
Augie Fackler
|
r43347 | kwargs[b'heads'] = heads | ||
kwargs[b'common'] = common | ||||
Mike Edgar
|
r23218 | |||
Mike Hommey
|
r22542 | for name in getbundle2partsorder: | ||
func = getbundle2partsmapping[name] | ||||
Augie Fackler
|
r43346 | func( | ||
bundler, | ||||
repo, | ||||
source, | ||||
bundlecaps=bundlecaps, | ||||
b2caps=b2caps, | ||||
**pycompat.strkwargs(kwargs) | ||||
) | ||||
Mike Hommey
|
r22542 | |||
Augie Fackler
|
r43347 | info[b'prefercompressed'] = bundler.prefercompressed | ||
Gregory Szorc
|
r35805 | |||
Gregory Szorc
|
r35803 | return info, bundler.getchunks() | ||
Mike Hommey
|
r22542 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """add a 'stream2' (streaming clone) part to the requested bundle

    Delegates entirely to bundle2, which decides from ``kwargs`` whether
    streaming was requested and allowed.
    """
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
Boris Feld
|
r35777 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'changegroup')
def _getbundlechangegrouppart(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True) or not b2caps:
        return

    # Negotiate the changegroup version: newest version supported by both
    # sides, defaulting to '01' for clients that do not advertise any.
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        return

    if kwargs.get('narrow', False):
        include = sorted(filter(bool, kwargs.get('includepats', [])))
        exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
        matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
    else:
        matcher = None

    cgstream = changegroup.makestream(
        repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
    )

    part = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        part.addparam(b'version', version)

    part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)

    if b'treemanifest' in repo.requirements:
        part.addparam(b'treemanifest', b'1')

    if b'exp-sidedata-flag' in repo.requirements:
        part.addparam(b'exp-sidedata', b'1')

    # 'include'/'exclude' are only bound in the narrow branch above; the
    # 'narrow' check here short-circuits before they are referenced.
    if (
        kwargs.get('narrow', False)
        and kwargs.get('narrow_acl', False)
        and (include or exclude)
    ):
        # this is mandatory because otherwise ACL clients won't work
        narrowspecpart = bundler.newpart(b'Narrow:responsespec')
        narrowspecpart.data = b'%s\0%s' % (
            b'\n'.join(include),
            b'\n'.join(exclude),
        )
Gregory Szorc
|
r38844 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    # nothing to do unless the client explicitly asked for bookmarks
    if not kwargs.get('bookmarks', False):
        return
    # the binary bookmark part requires explicit client support
    if not b2caps or b'bookmarks' not in b2caps:
        raise error.Abort(_(b'no common bookmarks exchange method'))
    data = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if data:
        bundler.newpart(b'bookmarks', data=data)
@getbundle2partsgenerator(b'listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one part per requested pushkey namespace (e.g. phases, bookmarks)
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart(b'listkeys')
        part.addparam(b'namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
Pierre-Yves David
|
r20967 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only ship markers relevant to ancestors of the exchanged heads
    subset = [c.node() for c in repo.set(b'::%ln', heads)]
    markers = obsutil.sortedmarkers(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
Pierre-Yves David
|
r22353 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'phases')
def _getbundlephasespart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add phase heads part to the requested bundle"""
    if kwargs.get('phases', False):
        # The client must advertise the binary 'heads' phase exchange.
        # Default the lookup to an empty tuple: a client whose b2caps lack
        # a 'phases' entry would otherwise crash with a TypeError
        # (b'heads' in None) instead of aborting cleanly.
        if not b2caps or b'heads' not in b2caps.get(b'phases', ()):
            raise error.Abort(_(b'no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # publishing repo: everything it serves is public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move
            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)

        seenphases = list(headsbyphase.keys())

        # We do not handle anything but public and draft phase for now)
        if seenphases:
            assert max(seenphases) <= phases.draft

        # if client is pulling non-public changesets, we need to find
        # intermediate public heads.
        draftheads = headsbyphase.get(phases.draft, set())
        if draftheads:
            publicheads = headsbyphase.get(phases.public, set())

            revset = b'heads(only(%ln, %ln) and public())'
            extraheads = repo.revs(revset, draftheads, publicheads)
            for r in extraheads:
                headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart(b'phase-heads', data=phasedata)
@getbundle2partsgenerator(b'hgtagsfnodes')
def _getbundletagsfnodes(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changeset are being exchanged,
    # - the client supports it.
    if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
Gregory Szorc
|
r25402 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
@getbundle2partsgenerator(b'cache:rev-branch-cache')
def _getbundlerevbranchcache(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Don't send unless:
    # - changeset are being exchanged,
    # - the client supports it.
    # - narrow bundle isn't in play (not currently compatible).
    if (
        not kwargs.get('cg', True)
        or not b2caps
        or b'rev-branch-cache' not in b2caps
        or kwargs.get('narrow', False)
        or repo.ui.has_section(_NARROWACL_SECTION)
    ):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)
Augie Fackler
|
r43346 | |||
Pierre-Yves David
|
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
    # The sender may bypass the check (b'force'), send the literal head
    # list, or send a sha1 digest of the sorted heads (b'hashed', hash).
    unchanged = (
        their_heads == [b'force']
        or their_heads == heads
        or their_heads == [b'hashed', heads_hash]
    )
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced(
            b'repository changed while %s - please try again' % context
        )
Pierre-Yves David
|
r20968 | |||
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    # bundle1: combined changegroup result; bundle2: the reply bundle (or None)
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    # callback flushing captured ui output into the reply; set lazily below
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool(
        b'experimental', b'bundle2-output-capture'
    )
    if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
        # always capture output when serving a remote HTTP(S) client
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, b'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = b"\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:

                def gettransaction():
                    # Create the locks and transaction on first use only, so
                    # bundle2 parts that need no transaction avoid the cost.
                    if not lockandtr[2]:
                        if not bookmod.bookmarksinstore(repo):
                            # bookmarks outside the store need the wlock too
                            lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs[b'source'] = source
                        lockandtr[2].hookargs[b'url'] = url
                        lockandtr[2].hookargs[b'bundle2'] = b'1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool(
                    b'experimental', b'bundle2lazylocking'
                ):
                    gettransaction()

                op = bundle2.bundleoperation(
                    repo,
                    gettransaction,
                    captureoutput=captureoutput,
                    source=b'push',
                )
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # start buffering ui output; flushed into an 'output'
                        # part of the reply bundle by 'recordout' (run in the
                        # outer finally)
                        repo.ui.pushbuffer(error=True, subproc=True)

                        def recordout(output):
                            r.newpart(b'output', data=output, mandatory=False)

                if lockandtr[2] is not None:
                    lockandtr[2].close()
            # BaseException (not Exception) so KeyboardInterrupt and friends
            # are also tagged before being re-raised
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    # salvage already-built reply parts so captured output can
                    # still be delivered alongside the error
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()

                    def recordout(output):
                        part = bundle2.bundlepart(
                            b'output', data=output, mandatory=False
                        )
                        parts.append(part)

                raise
    finally:
        # release in reverse acquisition order: tr, lock, wlock
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
Gregory Szorc
|
r26623 | |||
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible.

    Only runs for a full clone into an empty local repo: the feature must be
    enabled, no specific heads requested, and the remote must advertise the
    ``clonebundles`` capability. Failure to apply a bundle aborts by default
    (see thundering-herd comment below) unless ``ui.clonebundlefallback``
    allows falling back to a regular clone.
    """
    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool(b'ui', b'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # A pull of specific heads is not a full clone; skip.
    if pullop.heads:
        return

    if not remote.capable(b'clonebundles'):
        return

    with remote.commandexecutor() as e:
        res = e.callcommand(b'clonebundles', {}).result()

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(
            _(
                b'no clone bundles available on remote; '
                b'falling back to regular clone\n'
            )
        )
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested
    )

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(
            _(
                b'no compatible clone bundles available on server; '
                b'falling back to regular clone\n'
            )
        )
        repo.ui.warn(
            _(b'(you may want to report this to the server operator)\n')
        )
        return

    # Sort by the client's ui.clonebundleprefers and try the best entry only.
    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0][b'URL']
    repo.ui.status(_(b'applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_(b'finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
        repo.ui.warn(_(b'falling back to normal clone\n'))
    else:
        raise error.Abort(
            _(b'error applying bundle'),
            hint=_(
                b'if this error persists, consider contacting '
                b'the server operator or disable clone '
                b'bundles via '
                b'"--config ui.clonebundles=false"'
            ),
        )
Gregory Szorc
|
r26623 | |||
Gregory Szorc
|
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            # skip blank lines
            continue
        attrs = {b'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split(b'=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == b'BUNDLESPEC':
                try:
                    bundlespec = parsebundlespec(repo, value)
                    attrs[b'COMPRESSION'] = bundlespec.compression
                    attrs[b'VERSION'] = bundlespec.version
                except (
                    error.InvalidBundleSpecification,
                    error.UnsupportedBundleSpecification,
                ):
                    # malformed spec: keep the raw value, omit derived keys
                    pass

        entries.append(attrs)
    return entries
Augie Fackler
|
r43346 | |||
Boris Feld
|
def isstreamclonespec(bundlespec):
    """Report whether ``bundlespec`` describes a stream clone bundle."""
    # Stream clones are always uncompressed on the wire.
    if bundlespec.wirecompression != b'UN':
        return False
    # Stream clone v1
    if bundlespec.wireversion == b's1':
        return True
    # Stream clone v2
    return bool(
        bundlespec.wireversion == b'02'
        and bundlespec.contentopts.get(b'streamv2')
    )
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    accepted = []
    for entry in entries:
        spec = entry.get(b'BUNDLESPEC')
        if spec:
            try:
                bundlespec = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and not isstreamclonespec(bundlespec):
                    repo.ui.debug(
                        b'filtering %s because not a stream clone\n'
                        % entry[b'URL']
                    )
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug(
                    b'filtering %s because unsupported bundle '
                    b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
                )
                continue
        elif streamclonerequested:
            # No spec means we can't tell whether this is a stream clone
            # bundle, so play it safe and skip it.
            repo.ui.debug(
                b'filtering %s because cannot determine if a stream '
                b'clone bundle\n' % entry[b'URL']
            )
            continue

        if b'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug(
                b'filtering %s because SNI not supported\n' % entry[b'URL']
            )
            continue

        accepted.append(entry)
    return accepted
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the ordered preference list; the first preference that tells
        # the two entries apart decides the ordering.
        for prefkey, prefvalue in self.prefers:
            mine = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # An exact match on one side beats a missing attribute on the
            # other side.
            if mine is not None and theirs is None and mine == prefvalue:
                return -1
            if theirs is not None and mine is None and theirs == prefvalue:
                return 1
            # Both attributes must be present to compare; identical values
            # defer to the next preference.
            if mine is None or theirs is None or mine == theirs:
                continue
            # Exact matches sort first.
            if mine == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1
            # Neither matched; fall back to the next preference.
        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def sortclonebundleentries(ui, entries):
    """Sort manifest entries by the user's ``ui.clonebundleprefers`` config.

    Returns a new list; the input order is preserved when no preferences are
    configured or when preferences do not discriminate between entries.
    """
    prefers = ui.configlist(b'ui', b'clonebundleprefers')
    if not prefers:
        return list(entries)

    def _parsepref(pref):
        # Each preference must look like KEY=VALUE.
        if b'=' not in pref:
            hint = _(b"each comma separated item should be key=value pairs")
            raise error.Abort(
                _(b"invalid ui.clonebundleprefers item: %s") % pref, hint=hint
            )
        return pref.split(b'=', 1)

    parsed = [_parsepref(pref) for pref in prefers]

    # Decorate, sort via clonebundleentry's rich comparisons, undecorate.
    decorated = sorted(clonebundleentry(entry, parsed) for entry in entries)
    return [item.value for item in decorated]
Gregory Szorc
|
r26648 | |||
Augie Fackler
|
r43346 | |||
Gregory Szorc
|
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Fetches ``url``, reads it as a bundle, and applies it inside a single
    lock + transaction. Returns True on success; HTTP/URL fetch errors are
    reported as warnings and False is returned so the caller can decide
    whether to fall back or abort.
    """
    with repo.lock(), repo.transaction(b'bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, b'stream')

            # Stream clone bundles apply themselves; everything else goes
            # through the regular bundle2 application path.
            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(
                _(b'HTTP error fetching bundle: %s\n')
                % stringutil.forcebytestr(e)
            )
        except urlerr.urlerror as e:
            ui.warn(
                _(b'error fetching bundle: %s\n')
                % stringutil.forcebytestr(e.reason)
            )

        return False