narrow: drop support for remote expansion (BC)

Previous patches to validate narrow patterns accidentally dropped support for the include: syntax that allows patterns to be expanded from a remote. This feature was never implemented in core and is only implemented on Google's custom server. Per @martinvonz's review comment in D4522, it is OK to drop this feature since it isn't used.

The concept of this feature does seem useful. I anticipate it making a comeback some day in some shape or form. But for now, let's jettison the dead code.

Differential Revision: https://phab.mercurial-scm.org/D4530

File last commit: r37471:2735d08e default
Current revision: r39581:10a8472f default
bundleparts.py
115 lines | 3.5 KiB | text/x-python
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import

from mercurial.i18n import _

from mercurial import (
    bundle2,
    changegroup,
    error,
    extensions,
    revsetlang,
    util,
)

from . import common

isremotebooksenabled = common.isremotebooksenabled

scratchbranchparttype = 'b2x:infinitepush'

def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
    if not outgoing.missing:
        raise error.Abort(_('no commits to push'))

    if scratchbranchparttype not in bundle2.bundle2caps(peer):
        raise error.Abort(_('no server support for %r') % scratchbranchparttype)

    _validaterevset(repo, revsetlang.formatspec('%ln', outgoing.missing),
                    bookmark)

    supportedversions = changegroup.supportedoutgoingversions(repo)
    # Explicitly avoid using '01' changegroup version in infinitepush to
    # support general delta
    supportedversions.discard('01')
    cgversion = min(supportedversions)
    _handlelfs(repo, outgoing.missing)
    cg = changegroup.makestream(repo, outgoing, cgversion, 'push')

    params = {}
    params['cgversion'] = cgversion
    if bookmark:
        params['bookmark'] = bookmark
        # 'prevbooknode' is necessary for pushkey reply part
        params['bookprevnode'] = ''
        bookmarks = repo._bookmarks
        if bookmark in bookmarks:
            params['bookprevnode'] = bookmarks.changectx(bookmark).hex()

    # Do not send pushback bundle2 part with bookmarks if remotenames extension
    # is enabled. It will be handled manually in `_push()`
    if not isremotebooksenabled(ui):
        params['pushbackbookmarks'] = '1'

    parts = []

    # .upper() marks this as a mandatory part: server will abort if there's no
    # handler
    parts.append(bundle2.bundlepart(
        scratchbranchparttype.upper(),
        advisoryparams=params.iteritems(),
        data=cg))

    return parts

def _validaterevset(repo, revset, bookmark):
    """Abort if the revs to be pushed aren't valid for a scratch branch."""
    if not repo.revs(revset):
        raise error.Abort(_('nothing to push'))

    if bookmark:
        # Allow bundle with many heads only if no bookmark is specified
        heads = repo.revs('heads(%r)', revset)
        if len(heads) > 1:
            raise error.Abort(
                _('cannot push more than one head to a scratch branch'))

def _handlelfs(repo, missing):
    '''Special case if lfs is enabled

    If lfs is enabled then we need to call prepush hook
    to make sure large files are uploaded to lfs
    '''
    try:
        lfsmod = extensions.find('lfs')
        lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
    except KeyError:
        # Ignore if lfs extension is not enabled
        return

class copiedpart(object):
    """a copy of unbundlepart content that can be consumed later"""

    def __init__(self, part):
        # copy "public properties"
        self.type = part.type
        self.id = part.id
        self.mandatory = part.mandatory
        self.mandatoryparams = part.mandatoryparams
        self.advisoryparams = part.advisoryparams
        self.params = part.params
        self.mandatorykeys = part.mandatorykeys
        # copy the buffer
        self._io = util.stringio(part.read())

    def consume(self):
        return

    def read(self, size=None):
        if size is None:
            return self._io.read()
        else:
            return self._io.read(size)