# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import binascii
import io
import logging
import stat
import urllib.request
import urllib.parse
import traceback
import hashlib

from hgext import largefiles, rebase, purge
from mercurial import commands
from mercurial import unionrepo
from mercurial import verify
from mercurial import repair
from mercurial.error import AmbiguousPrefixLookupError

import vcsserver
from vcsserver import exceptions
from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, store_archive_in_cache, ArchiveNode, BytesEnvelope, \
    BinaryEnvelope
from vcsserver.hgcompat import (
    archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
    makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
    patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
    RepoLookupError, InterventionRequired, RequirementError,
    alwaysmatcher, patternmatcher, hgutil, hgext_strip)
from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
from vcsserver.vcs_base import RemoteBase
from vcsserver.config import hooks as hooks_config

log = logging.getLogger(__name__)


def make_ui_from_config(repo_config):

    class LoggingUI(ui.ui):

        def status(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info(' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.warning('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.error('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.info('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            str_msg = map(safe_str, msg)
            log.debug('ui_logger:'+' '.join(str_msg).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')

    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from the repository's hgrc file. Recent
    # Mercurial versions enable largefiles in hgrc on clone from a largefile
    # repo.
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    return baseui
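

# Example (illustrative, with hypothetical values): `repo_config` is expected to
# be an iterable of (section, option, value) string triples, each applied via
# baseui.setconfig() after conversion to bytes, e.g.:
#
#   baseui = make_ui_from_config([
#       ('ui', 'username', 'RhodeCode'),
#       ('extensions', 'largefiles', '!'),
#   ])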


def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise
    return wrapper
class MercurialFactory(RepoFactory): | ||||
repo_type = 'hg' | ||||
def _create_config(self, config, hooks=True): | ||||
if not hooks: | ||||
r1124 | ||||
hooks_to_clean = { | ||||
hooks_config.HOOK_REPO_SIZE, | ||||
hooks_config.HOOK_PRE_PULL, | ||||
hooks_config.HOOK_PULL, | ||||
hooks_config.HOOK_PRE_PUSH, | ||||
# TODO: what about PRETXT, this was disabled in pre 5.0.0 | ||||
hooks_config.HOOK_PRETX_PUSH, | ||||
} | ||||
r1043 | new_config = [] | |||
for section, option, value in config: | ||||
if section == 'hooks' and option in hooks_to_clean: | ||||
continue | ||||
new_config.append((section, option, value)) | ||||
config = new_config | ||||
baseui = make_ui_from_config(config) | ||||
return baseui | ||||
def _create_repo(self, wire, create): | ||||
baseui = self._create_config(wire["config"]) | ||||
r1106 | repo = instance(baseui, safe_bytes(wire["path"]), create) | |||
log.debug('repository created: got HG object: %s', repo) | ||||
return repo | ||||
r1043 | ||||
def repo(self, wire, create=False): | ||||
""" | ||||
Get a repository instance for the given path. | ||||
""" | ||||
return self._create_repo(wire, create) | ||||


def patch_ui_message_output(baseui):
    baseui.setconfig(b'ui', b'quiet', b'false')
    output = io.BytesIO()

    def write(data, **unused_kwargs):
        output.write(data)

    baseui.status = write
    baseui.write = write
    baseui.warn = write
    baseui.debug = write

    return baseui, output
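

# Usage sketch (illustrative): patch_ui_message_output() is how commands further
# below, such as verify() and hg_update_cache(), capture Mercurial's ui output:
#
#   baseui, output = patch_ui_message_output(baseui)
#   # ... run a Mercurial command with this baseui ...
#   captured = output.getvalue()  # bytes written via status/write/warn/debug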


def get_obfuscated_url(url_obj):
    url_obj.passwd = b'*****' if url_obj.passwd else url_obj.passwd
    url_obj.query = obfuscate_qs(url_obj.query)
    obfuscated_uri = str(url_obj)
    return obfuscated_uri


def normalize_url_for_hg(url: str):
    _proto = None

    if '+' in url[:url.find('://')]:
        _proto = url[0:url.find('+')]
        url = url[url.find('+') + 1:]
    return url, _proto
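

# Example (illustrative, hypothetical URL): a scheme prefix before '://' is
# split off, so normalize_url_for_hg('hg+http://code.example.com/repo') returns
# ('http://code.example.com/repo', 'hg'), while a plain URL is returned
# unchanged with _proto=None.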


class HgRemote(RemoteBase):

    def __init__(self, factory):
        self._factory = factory
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
        self._bulk_file_methods = {
            "size": self.fctx_size,
            "data": self.fctx_node_data,
            "flags": self.fctx_flags,
            "is_binary": self.is_binary,
            "md5": self.md5_hash,
        }

    def _get_ctx(self, repo, ref):
        return get_ctx(repo, ref)

    @reraise_safe_exceptions
    def discover_hg_version(self):
        from mercurial import util
        return safe_str(util.version())

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo = self._factory.repo(wire)
        try:
            return len(repo) == 0
        except Exception:
            log.exception("failed to read object_store")
            return False

    @reraise_safe_exceptions
    def bookmarks(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}

        return _bookmarks(context_uid, repo_id)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip_node, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
                if closed and is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))

            return bt

        return _branches(context_uid, repo_id, normal, closed)

    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        return _bulk_request(repo_id, commit_id, sorted(pre_load))
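
    # Example (illustrative): bulk_request(wire, commit_id, pre_load=["author",
    # "branch", "message"]) returns a dict keyed by the requested attributes,
    # resolved through self._bulk_methods above; unknown keys raise VcsException.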

    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()

        return _ctx_branch(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()

        return _ctx_date(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # Mercurial phases: public=0, draft=1, secret=2
            return ctx.phase()

        return _ctx_phase(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()

        return _ctx_obsolete(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()

        return _ctx_hidden(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_substate(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.substate

    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # the status object (an odd, custom named tuple in Mercurial) is not
        # correctly serializable; we turn it into a list, as the underlying
        # API expects a list
        return list(status)

    @reraise_safe_exceptions
    def ctx_user(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.user()

    @reraise_safe_exceptions
    def check_url(self, url, config):
        url, _proto = normalize_url_for_hg(url)
        url_obj = url_parser(safe_bytes(url))

        test_uri = safe_str(url_obj.authinfo()[0])
        authinfo = url_obj.authinfo()[1]
        obfuscated_uri = get_obfuscated_url(url_obj)
        log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)

        handlers = []
        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        q = {"cmd": 'between'}
        q.update({'pairs': "{}-{}".format('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.parse.urlencode(q)
        cu = f"{test_uri}{qs}"
        req = urllib.request.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", obfuscated_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s", obfuscated_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, safe_bytes(url))
                peer_checker.lookup(b'tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        obfuscated_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (obfuscated_uri, e))

        log.info("URL is a valid Mercurial repository: %s", obfuscated_uri)
        return True

    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        repo = self._factory.repo(wire)

        if file_filter:
            # unpack the file-filter
            repo_path, node_path = file_filter
            match_filter = match(safe_bytes(repo_path), b'', [safe_bytes(node_path)])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            diff_iter = patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
            return BytesEnvelope(b"".join(diff_iter))
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()

    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))

            def history_iter():
                limit_rev = fctx.rev()
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]

        return _node_history(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def node_history_untill(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))

            file_log = list(fctx.filelog())
            if limit:
                # Limit to the last n items
                file_log = file_log[-limit:]

            return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]

        return _node_history_until(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def bulk_file_request(self, wire, commit_id, path, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_file_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, _commit_id, _path)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
            return result

        return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))

    @reraise_safe_exceptions
    def fctx_annotate(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(safe_bytes(path))

        result = []
        for i, annotate_obj in enumerate(fctx.annotate(), 1):
            ln_no = i
            sha = hex(annotate_obj.fctx.node())
            content = annotate_obj.text
            result.append((ln_no, ascii_str(sha), content))
        return BinaryEnvelope(result)

    @reraise_safe_exceptions
    def fctx_node_data(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(safe_bytes(path))
        return BytesEnvelope(fctx.data())

    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(safe_bytes(path))
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def fctx_size(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_size(_repo_id, _revision, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(safe_bytes(path))
            return fctx.size()

        return _fctx_size(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
            return revs

        return _get_all_commit_ids(context_uid, repo_id, name)

    @reraise_safe_exceptions
    def get_config_value(self, wire, section, name, untrusted=False):
        repo = self._factory.repo(wire)
        return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)

    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
            return largefiles.lfutil.isstandin(safe_bytes(path))

        return _is_large_file(context_uid, repo_id, commit_id, path)

    @reraise_safe_exceptions
    def is_binary(self, wire, revision, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))
            return fctx.isbinary()

        return _is_binary(repo_id, revision, path)

    @reraise_safe_exceptions
    def md5_hash(self, wire, revision, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))
            return hashlib.md5(fctx.data()).hexdigest()

        return _md5_hash(repo_id, revision, path)

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)

    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)

    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)

    @reraise_safe_exceptions
    def link(self, wire, sha, path):
        repo = self._factory.repo(wire)
        largefiles.lfutil.link(
            largefiles.lfutil.usercachepath(repo.ui, sha), path)

    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        self._factory.repo(wire, create=create)

    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _lookup(_context_uid, _repo_id, _revision, _both):
            repo = self._factory.repo(wire)
            rev = _revision
            if isinstance(rev, int):
                # NOTE(marcink):
                # since Mercurial doesn't support negative indexes properly
                # we need to shift accordingly by one to get proper index, e.g
                # repo[-1] => repo[-2]
                # repo[0]  => repo[-1]
                if rev <= 0:
                    rev = rev + -1
            try:
                ctx = self._get_ctx(repo, rev)
            except AmbiguousPrefixLookupError as e:
                e = RepoLookupError(rev)
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except (TypeError, RepoLookupError, binascii.Error) as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except LookupError as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(e.name)

            if not both:
                return ctx.hex()

            ctx = repo[ctx.hex()]
            return ctx.hex(), ctx.rev()

        return _lookup(context_uid, repo_id, revision, both)

    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        bookmarks = list(dict(repo._bookmarks).keys())
        remote = peer(repo, {}, safe_bytes(url))
        # Disable any prompts for this remote
        remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()

    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            revisions = [
                ascii_str(repo[rev].hex())
                for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
            ]
            return revisions

        return _rev_range(context_uid, repo_id, sorted(commit_filter))

    @reraise_safe_exceptions
    def rev_range_hash(self, wire, node):
        repo = self._factory.repo(wire)

        def get_revs(repo, rev_opt):
            if rev_opt:
                revs = revrange(repo, rev_opt)
                if len(revs) == 0:
                    return (nullrev, nullrev)
                return max(revs), min(revs)
            else:
                return len(repo) - 1, 0

        stop, start = get_revs(repo, [node + ':'])
        revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
        return revs

    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        org_path = safe_bytes(wire["path"])
        other_path = safe_bytes(kwargs.pop('other_path', ''))

        # case when we want to compare two independent repositories
        if other_path and other_path != org_path:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.makeunionrepository(baseui, other_path, org_path)
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))

    @reraise_safe_exceptions
    def verify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()

    @reraise_safe_exceptions
    def hg_update_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        repair.rebuildfncache(baseui, repo)

        return output.getvalue()

    @reraise_safe_exceptions
    def tags(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}

        return _tags(context_uid, repo_id)

    @reraise_safe_exceptions
    def update(self, wire, node='', clean=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        node = safe_bytes(node)
        commands.update(baseui, repo, node=node, clean=clean)

    @reraise_safe_exceptions
    def identify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def heads(self, wire, branch=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui.write = write
        if branch:
            args = [safe_bytes(branch)]
        else:
            args = []
        commands.heads(baseui, repo, template=b'{node} ', *args)

        return output.getvalue()

    @reraise_safe_exceptions
    def ancestor(self, wire, revision1, revision2):
        repo = self._factory.repo(wire)
        changelog = repo.changelog
        lookup = repo.lookup
        a = changelog.ancestor(lookup(safe_bytes(revision1)), lookup(safe_bytes(revision2)))
        return hex(a)

    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)

    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        publishing = baseui.configbool(b'phases', b'publish')

        def _filectxfn(_repo, ctx, path: bytes):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if safe_str(path) in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if safe_bytes(node['path']) == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=safe_bytes(node['path']),
                        data=safe_bytes(node['content']),
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)
            abort_exc = exceptions.AbortException()
            raise abort_exc(f"Given path hasn't been marked as added, changed or removed ({path})")

        if publishing:
            new_commit_phase = b'public'
        else:
            new_commit_phase = b'draft'

        with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
            kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=safe_bytes(message),
                files=[safe_bytes(x) for x in files],
                filectxfn=_filectxfn,
                user=safe_bytes(user),
                date=(commit_time, commit_timezone),
                extra=kwargs)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id

    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        repo = self._factory.repo(wire)
        # Disable any prompts for this repo
        repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        remote = peer(repo, {}, safe_bytes(url))
        # Disable any prompts for this remote
        remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')

        if commit_ids:
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult

    @reraise_safe_exceptions
    def pull_cmd(self, wire, source, bookmark='', branch='', revision='', hooks=True):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        source = safe_bytes(source)

        # Mercurial internally has a lot of logic that checks ONLY whether an
        # option is defined, so we only pass options that were actually given.
        opts = {}

        if bookmark:
            opts['bookmark'] = [safe_bytes(x) for x in bookmark] \
                if isinstance(bookmark, list) else safe_bytes(bookmark)
        if branch:
            opts['branch'] = [safe_bytes(x) for x in branch] \
                if isinstance(branch, list) else safe_bytes(branch)
        if revision:
            opts['rev'] = [safe_bytes(x) for x in revision] \
                if isinstance(revision, list) else safe_bytes(revision)

        commands.pull(baseui, repo, source, **opts)

    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks: bool = True, push_branches: bool = False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)

        revisions = [safe_bytes(x) for x in revisions] \
            if isinstance(revisions, list) else safe_bytes(revisions)

        commands.push(baseui, repo, safe_bytes(dest_path),
                      rev=revisions,
                      new_branch=push_branches)

    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip.strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)

    @reraise_safe_exceptions
    def get_unresolved_files(self, wire):
        repo = self._factory.repo(wire)

        log.debug('Calculating unresolved files for repo: %s', repo)
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui = self._factory._create_config(wire['config'])
        baseui.write = write

        commands.resolve(baseui, repo, list=True)
        unresolved = output.getvalue().splitlines(0)
        return unresolved

    @reraise_safe_exceptions
    def merge(self, wire, revision):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

        # If sub-repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub-repository sources. By setting the
        # interactive flag to `False`, Mercurial doesn't prompt the user but
        # instead uses a default value.
        repo.ui.setconfig(b'ui', b'interactive', False)
        commands.merge(baseui, repo, rev=safe_bytes(revision))

    @reraise_safe_exceptions
    def merge_state(self, wire):
        repo = self._factory.repo(wire)
        repo.ui.setconfig(b'ui', b'merge', b'internal:dump')

        # If sub-repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub-repository sources. By setting the
        # interactive flag to `False`, Mercurial doesn't prompt the user but
        # instead uses a default value.
        repo.ui.setconfig(b'ui', b'interactive', False)
        ms = hg_merge.mergestate(repo)
        return [x for x in ms.unresolved()]

    @reraise_safe_exceptions
    def commit(self, wire, message, username, close_branch=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'username', safe_bytes(username))
        commands.commit(baseui, repo, message=safe_bytes(message), close_branch=close_branch)

    @reraise_safe_exceptions
    def rebase(self, wire, source='', dest='', abort=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
        # If sub-repositories are used, Mercurial prompts the user on merge
        # conflicts or differing sub-repository sources. By setting the
        # interactive flag to `False`, Mercurial doesn't prompt the user but
        # instead uses a default value.
        repo.ui.setconfig(b'ui', b'interactive', False)

        rebase.rebase(baseui, repo, base=safe_bytes(source or ''), dest=safe_bytes(dest or ''),
                      abort=abort, keep=not abort)

    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, safe_bytes(name), node, safe_bytes(message), local, safe_bytes(user), date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=''):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        revision = revision or ''
        commands.bookmark(baseui, repo, safe_bytes(bookmark), rev=safe_bytes(revision), force=True)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        # we don't need any special hooks for Mercurial
        pass

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        return {
            'pre_version': vcsserver.__version__,
            'post_version': vcsserver.__version__,
        }

    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        pass

    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id, cache_config):

        def file_walker(_commit_id, path):
            repo = self._factory.repo(wire)
            ctx = repo[_commit_id]
            is_root = path in ['', '/']
            if is_root:
                matcher = alwaysmatcher(badfn=None)
            else:
                matcher = patternmatcher('', [(b'glob', safe_bytes(path)+b'/**', b'')], badfn=None)
            file_iter = ctx.manifest().walk(matcher)

            for fn in file_iter:
                file_path = fn
                flags = ctx.flags(fn)
                mode = b'x' in flags and 0o755 or 0o644
                is_link = b'l' in flags

                yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

        return store_archive_in_cache(
            file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)