# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import traceback
import logging
import urllib.parse

from vcsserver.lib.rc_cache import region_meta
from vcsserver import exceptions
from vcsserver.exceptions import NoContentException
from vcsserver.hgcompat import archival
from vcsserver.str_utils import safe_bytes

log = logging.getLogger(__name__)


class RepoFactory(object):
    """
    Utility to create instances of repository
    It provides internal caching of the `repo` object based on
    the :term:`call context`.
    """
    repo_type = None

    def __init__(self):
        self._cache_region = region_meta.dogpile_cache_regions['repo_object']

    def _create_config(self, path, config):
        config = {}
        return config

    def _create_repo(self, wire, create):
        raise NotImplementedError()

    def repo(self, wire, create=False):
        raise NotImplementedError()
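

# Illustrative sketch only (not part of the original module): backend factories
# are expected to subclass RepoFactory and implement ``_create_repo``. The class
# name and the ``open_backend_repo`` helper below are hypothetical placeholders.
#
#   class HypotheticalRepoFactory(RepoFactory):
#       repo_type = 'hypothetical'
#
#       def _create_repo(self, wire, create):
#           # ``wire`` carries the per-call context (repository path, config, ...)
#           return open_backend_repo(wire['path'], create=create)
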
def obfuscate_qs(query_string):
if query_string is None:
return None
parsed = []
for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
if k in ['auth_token', 'api_key']:
v = "*****"
parsed.append((k, v))
return '&'.join('{}{}'.format(
k, '={}'.format(v) if v else '') for k, v in parsed)
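

# Example of the obfuscation above (derived from the parsing logic in
# ``obfuscate_qs``: values for ``auth_token``/``api_key`` are masked, and a
# blank value keeps only the key):
#
#   obfuscate_qs('auth_token=secret&page=2&empty=')
#   -> 'auth_token=*****&page=2&empty'
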
def raise_from_original(new_type, org_exc: Exception):
"""
Raise a new exception type with original args and traceback.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
new_exc = new_type(*exc_value.args)
# store the original traceback into the new exc
new_exc._org_exc_tb = traceback.format_tb(exc_traceback)
try:
raise new_exc.with_traceback(exc_traceback)
finally:
del exc_traceback
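

# Typical (illustrative) usage of ``raise_from_original``: re-raise a backend
# error as a different exception type while preserving the traceback. It must
# be called from inside an ``except`` block so that ``sys.exc_info()`` is
# populated. The exception names below are placeholders, not actual backend
# types.
#
#   try:
#       some_backend_call()
#   except SomeBackendError as e:
#       raise_from_original(WrappedError, e)
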
class ArchiveNode(object):
def __init__(self, path, mode, is_link, raw_bytes):
self.path = path
self.mode = mode
self.is_link = is_link
self.raw_bytes = raw_bytes


def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
    """
    walker should be a file walker, for example:

        def walker(commit_id, archive_at_path):
            for file_info in files:
                yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
    """
extra_metadata = extra_metadata or {}
archive_dest_path = safe_bytes(archive_dest_path)
if kind == "tgz":
archiver = archival.tarit(archive_dest_path, mtime, b"gz")
elif kind == "tbz2":
archiver = archival.tarit(archive_dest_path, mtime, b"bz2")
elif kind == 'zip':
archiver = archival.zipit(archive_dest_path, mtime)
else:
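        # the double call below implies ``exceptions.ArchiveException()`` returns
        # the actual exception class, which is then instantiated with the message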
raise exceptions.ArchiveException()(
f'Remote does not support: "{kind}" archive type.')
for f in walker(commit_id, archive_at_path):
f_path = os.path.join(safe_bytes(archive_dir_name), f.path.lstrip(b'/'))
try:
archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create "empty"
            # directories which aren't supported by archiver
            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')
if write_metadata:
metadata = dict([
('commit_id', commit_id),
('mtime', mtime),
])
metadata.update(extra_metadata)
meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
return archiver.done()
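

# Minimal sketch of driving ``archive_repo`` (illustrative; ``iter_commit_files``
# and the destination path are made-up names). Note that ``raw_bytes`` is invoked
# as ``f.raw_bytes()`` above, so it must be a zero-argument callable returning
# the file content as bytes.
#
#   def walker(commit_id, archive_at_path):
#       for fn, mode, is_link, get_bytes in iter_commit_files(commit_id):
#           yield ArchiveNode(fn, mode, is_link, get_bytes)
#
#   archive_repo(walker, '/tmp/archive.tgz', kind='tgz', mtime=1700000000,
#                archive_at_path='/', archive_dir_name='repo',
#                commit_id='deadbeef')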